Start with a general question
Can I automatically distinguish emails that are SPAM from those that are not?
Make it concrete
Can I use quantitative characteristics of the emails to classify them as SPAM/HAM?
http://rss.acs.unt.edu/Rdoc/library/kernlab/html/spam.html
Dear Jeff,
Can you send me your address so I can send you the invitation?
Thanks,
Ben
Dear Jeff,
Can send me your address so I can send the invitation?
Thanks,
Ben
Frequency of 'you' \(= 2/17 = 0.118\)
# Load the kernlab package and examine the structure of the spam dataset
# (4601 emails, 57 word/character frequency predictors + type outcome)
library(kernlab)
data(spam)
str(spam)
## 'data.frame': 4601 obs. of 58 variables:
## $ make : num 0 0.21 0.06 0 0 0 0 0 0.15 0.06 ...
## $ address : num 0.64 0.28 0 0 0 0 0 0 0 0.12 ...
## $ all : num 0.64 0.5 0.71 0 0 0 0 0 0.46 0.77 ...
## $ num3d : num 0 0 0 0 0 0 0 0 0 0 ...
## $ our : num 0.32 0.14 1.23 0.63 0.63 1.85 1.92 1.88 0.61 0.19 ...
## $ over : num 0 0.28 0.19 0 0 0 0 0 0 0.32 ...
## $ remove : num 0 0.21 0.19 0.31 0.31 0 0 0 0.3 0.38 ...
## $ internet : num 0 0.07 0.12 0.63 0.63 1.85 0 1.88 0 0 ...
## $ order : num 0 0 0.64 0.31 0.31 0 0 0 0.92 0.06 ...
## $ mail : num 0 0.94 0.25 0.63 0.63 0 0.64 0 0.76 0 ...
## $ receive : num 0 0.21 0.38 0.31 0.31 0 0.96 0 0.76 0 ...
## $ will : num 0.64 0.79 0.45 0.31 0.31 0 1.28 0 0.92 0.64 ...
## $ people : num 0 0.65 0.12 0.31 0.31 0 0 0 0 0.25 ...
## $ report : num 0 0.21 0 0 0 0 0 0 0 0 ...
## $ addresses : num 0 0.14 1.75 0 0 0 0 0 0 0.12 ...
## $ free : num 0.32 0.14 0.06 0.31 0.31 0 0.96 0 0 0 ...
## $ business : num 0 0.07 0.06 0 0 0 0 0 0 0 ...
## $ email : num 1.29 0.28 1.03 0 0 0 0.32 0 0.15 0.12 ...
## $ you : num 1.93 3.47 1.36 3.18 3.18 0 3.85 0 1.23 1.67 ...
## $ credit : num 0 0 0.32 0 0 0 0 0 3.53 0.06 ...
## $ your : num 0.96 1.59 0.51 0.31 0.31 0 0.64 0 2 0.71 ...
## $ font : num 0 0 0 0 0 0 0 0 0 0 ...
## $ num000 : num 0 0.43 1.16 0 0 0 0 0 0 0.19 ...
## $ money : num 0 0.43 0.06 0 0 0 0 0 0.15 0 ...
## $ hp : num 0 0 0 0 0 0 0 0 0 0 ...
## $ hpl : num 0 0 0 0 0 0 0 0 0 0 ...
## $ george : num 0 0 0 0 0 0 0 0 0 0 ...
## $ num650 : num 0 0 0 0 0 0 0 0 0 0 ...
## $ lab : num 0 0 0 0 0 0 0 0 0 0 ...
## $ labs : num 0 0 0 0 0 0 0 0 0 0 ...
## $ telnet : num 0 0 0 0 0 0 0 0 0 0 ...
## $ num857 : num 0 0 0 0 0 0 0 0 0 0 ...
## $ data : num 0 0 0 0 0 0 0 0 0.15 0 ...
## $ num415 : num 0 0 0 0 0 0 0 0 0 0 ...
## $ num85 : num 0 0 0 0 0 0 0 0 0 0 ...
## $ technology : num 0 0 0 0 0 0 0 0 0 0 ...
## $ num1999 : num 0 0.07 0 0 0 0 0 0 0 0 ...
## $ parts : num 0 0 0 0 0 0 0 0 0 0 ...
## $ pm : num 0 0 0 0 0 0 0 0 0 0 ...
## $ direct : num 0 0 0.06 0 0 0 0 0 0 0 ...
## $ cs : num 0 0 0 0 0 0 0 0 0 0 ...
## $ meeting : num 0 0 0 0 0 0 0 0 0 0 ...
## $ original : num 0 0 0.12 0 0 0 0 0 0.3 0 ...
## $ project : num 0 0 0 0 0 0 0 0 0 0.06 ...
## $ re : num 0 0 0.06 0 0 0 0 0 0 0 ...
## $ edu : num 0 0 0.06 0 0 0 0 0 0 0 ...
## $ table : num 0 0 0 0 0 0 0 0 0 0 ...
## $ conference : num 0 0 0 0 0 0 0 0 0 0 ...
## $ charSemicolon : num 0 0 0.01 0 0 0 0 0 0 0.04 ...
## $ charRoundbracket : num 0 0.132 0.143 0.137 0.135 0.223 0.054 0.206 0.271 0.03 ...
## $ charSquarebracket: num 0 0 0 0 0 0 0 0 0 0 ...
## $ charExclamation : num 0.778 0.372 0.276 0.137 0.135 0 0.164 0 0.181 0.244 ...
## $ charDollar : num 0 0.18 0.184 0 0 0 0.054 0 0.203 0.081 ...
## $ charHash : num 0 0.048 0.01 0 0 0 0 0 0.022 0 ...
## $ capitalAve : num 3.76 5.11 9.82 3.54 3.54 ...
## $ capitalLong : num 61 101 485 40 40 15 4 11 445 43 ...
## $ capitalTotal : num 278 1028 2259 191 191 ...
## $ type : Factor w/ 2 levels "nonspam","spam": 2 2 2 2 2 2 2 2 2 2 ...
# Density of the frequency of the word 'your': blue = nonspam, red = spam
plot(density(spam$your[spam$type=="nonspam"]),
col="blue",main="",xlab="Frequency of 'your'")
lines(density(spam$your[spam$type=="spam"]),col="red")
Our algorithm
# Redraw the densities and mark the chosen cutoff with a vertical line
plot(density(spam$your[spam$type=="nonspam"]),
col="blue",main="",xlab="Frequency of 'your'")
lines(density(spam$your[spam$type=="spam"]),col="red")
abline(v=0.5,col="black")
# Predict spam whenever the frequency of 'your' exceeds the 0.5 cutoff
prediction <- ifelse(spam$your > 0.5,"spam","nonspam")
# Cross-tabulate predictions vs truth as fractions of all emails
head(table(prediction,spam$type)/length(spam$type),10)
##
## prediction nonspam spam
## nonspam 0.4590306 0.1017170
## spam 0.1469246 0.2923278
Accuracy \(\approx 0.459 + 0.292 = 0.751\)
In Sample Error: The error rate you get on the same data set you used to build your predictor. Sometimes called resubstitution error.
Out of Sample Error: The error rate you get on a new data set. Sometimes called generalization error.
Key ideas
# Draw a reproducible random sample of 10 emails from the spam data
library(kernlab); data(spam); set.seed(333)
smallSpam <- spam[sample(dim(spam)[1],size=10),]
# Color index for plotting: 1 (black) = nonspam, 2 (red) = spam
spamLabel <- (smallSpam$type=="spam")*1 + 1
plot(smallSpam$capitalAve,col=spamLabel)
Apply Rule 1 to smallSpam
# Rule 1: a hand-tuned classifier for average capital run length,
# overfit to perfectly separate the 10 sampled emails:
#   x > 2.70           -> "spam"
#   x < 2.40           -> "nonspam"
#   2.40 <= x <= 2.45  -> "spam"
#   2.45 <  x <= 2.70  -> "nonspam"
rule1 <- function(x){
  labels <- rep(NA,length(x))
  # The four intervals are disjoint, so the order of assignment
  # does not matter.
  labels[x < 2.40] <- "nonspam"
  labels[x > 2.45 & x <= 2.70] <- "nonspam"
  labels[x >= 2.40 & x <= 2.45] <- "spam"
  labels[x > 2.7] <- "spam"
  labels
}
# Rule 1 classifies all 10 sampled emails correctly (zero training error)
head(table(rule1(smallSpam$capitalAve),smallSpam$type),10)
##
## nonspam spam
## nonspam 5 0
## spam 0 5
Apply Rule 2 to smallSpam
# Rule 2: a single-threshold classifier — "spam" iff the average
# capital run length exceeds 2.8, otherwise "nonspam".
rule2 <- function(x){
  ifelse(x > 2.8, "spam", "nonspam")
}
# Rule 2 misclassifies one spam email in the training sample
head(table(rule2(smallSpam$capitalAve),smallSpam$type),10)
##
## nonspam spam
## nonspam 5 1
## spam 0 4
Apply to complete spam data
# Apply both rules to all 4601 emails
head(table(rule1(spam$capitalAve),spam$type),5)
##
## nonspam spam
## nonspam 2141 588
## spam 647 1225
head(table(rule2(spam$capitalAve),spam$type),5)
##
## nonspam spam
## nonspam 2224 642
## spam 564 1171
# Accuracy: the simpler rule 2 generalizes better than the overfit rule 1
mean(rule1(spam$capitalAve)==spam$type)
## [1] 0.7315801
mean(rule2(spam$capitalAve)==spam$type)
## [1] 0.7378831
# Counts of correctly classified emails
sum(rule1(spam$capitalAve)==spam$type)
## [1] 3366
sum(rule2(spam$capitalAve)==spam$type)
## [1] 3395
http://en.wikipedia.org/wiki/Overfitting
In general, Positive = identified and negative = rejected. Therefore:
True positive = correctly identified
False positive = incorrectly identified
True negative = correctly rejected
False negative = incorrectly rejected
Medical testing example:
True positive = Sick people correctly diagnosed as sick
False positive= Healthy people incorrectly identified as sick
True negative = Healthy people correctly identified as healthy
False negative = Sick people incorrectly identified as healthy.
http://en.wikipedia.org/wiki/Sensitivity_and_specificity
http://en.wikipedia.org/wiki/Sensitivity_and_specificity
http://www.biostat.jhsph.edu/~iruczins/teaching/140.615/
Mean squared error (MSE):
\[\frac{1}{n} \sum_{i=1}^n (Prediction_i - Truth_i)^2\]
Root mean squared error (RMSE):
\[\sqrt{\frac{1}{n} \sum_{i=1}^n(Prediction_i - Truth_i)^2}\]
http://en.wikipedia.org/wiki/Receiver_operating_characteristic
Approach:
Use the training set
Split it into training/test sets
Build a model on the training set
Evaluate on the test set
Repeat and average the estimated errors
Used for:
Picking variables to include in a model
Picking the type of prediction function to use
Picking the parameters in the prediction function
Comparing different predictors
# Split the spam data: 75% training, 25% testing (stratified on type)
library(caret); library(kernlab); data(spam)
inTrain <- createDataPartition(y=spam$type,
p=0.75, list=FALSE)
training1 <- spam[inTrain,]
testing <- spam[-inTrain,]
dim(training1)
## [1] 3451 58
# Fit a logistic regression (GLM) on the training set via caret
set.seed(32343)
modelFit <- train(type ~.,data=training1, method="glm")
modelFit
## Generalized Linear Model
##
## 3451 samples
## 57 predictors
## 2 classes: 'nonspam', 'spam'
##
## No pre-processing
## Resampling: Bootstrapped (25 reps)
## Summary of sample sizes: 3451, 3451, 3451, 3451, 3451, 3451, ...
## Resampling results:
##
## Accuracy Kappa
## 0.9122402 0.8140946
##
##
# Refit and inspect the coefficients of the final fitted GLM
modelFit <- train(type ~.,data=training1, method="glm")
modelFit$finalModel
##
## Call: NULL
##
## Coefficients:
## (Intercept) make address
## -1.381e+14 -2.321e+14 -1.302e+14
## all num3d our
## 7.966e+13 7.800e+13 2.235e+14
## over remove internet
## 1.187e+14 3.078e+14 2.496e+14
## order mail receive
## 2.880e+14 -3.152e+13 1.064e+14
## will people report
## -7.044e+13 -2.164e+14 -4.492e+13
## addresses free business
## -1.099e+13 1.871e+14 9.344e+13
## email you credit
## 1.571e+14 4.467e+13 2.627e+13
## your font num000
## 2.329e+14 5.361e+13 3.283e+14
## money hp hpl
## 1.870e+14 -2.275e+14 -1.082e+14
## george num650 lab
## -2.175e+14 1.056e+14 -1.762e+14
## labs telnet num857
## -3.168e+13 -7.696e+14 2.227e+14
## data num415 num85
## -2.003e+14 -4.322e+14 -1.296e+14
## technology num1999 parts
## 2.703e+14 -2.869e+12 -2.241e+14
## pm direct cs
## -1.209e+14 7.148e+13 -3.497e+14
## meeting original project
## -2.839e+14 -3.522e+14 -3.326e+14
## re edu table
## -2.232e+14 -3.060e+14 -4.648e+14
## conference charSemicolon charRoundbracket
## -5.708e+14 -5.086e+14 -1.671e+14
## charSquarebracket charExclamation charDollar
## -1.779e+14 1.882e+14 1.234e+15
## charHash capitalAve capitalLong
## 3.029e+14 4.115e+12 5.607e+11
## capitalTotal
## 2.183e+11
##
## Degrees of Freedom: 3450 Total (i.e. Null); 3393 Residual
## Null Deviance: 4628
## Residual Deviance: 30930 AIC: 31040
# Predict classes for the held-out test set
predictions <- predict(modelFit,newdata=testing)
head(predictions,10)
## [1] spam spam spam spam nonspam spam spam spam
## [9] spam spam
## Levels: nonspam spam
# Out-of-sample accuracy, sensitivity, specificity, etc.
confusionMatrix(predictions,testing$type)
## Confusion Matrix and Statistics
##
## Reference
## Prediction nonspam spam
## nonspam 555 14
## spam 142 439
##
## Accuracy : 0.8643
## 95% CI : (0.8432, 0.8836)
## No Information Rate : 0.6061
## P-Value [Acc > NIR] : < 2.2e-16
##
## Kappa : 0.7293
## Mcnemar's Test P-Value : < 2.2e-16
##
## Sensitivity : 0.7963
## Specificity : 0.9691
## Pos Pred Value : 0.9754
## Neg Pred Value : 0.7556
## Prevalence : 0.6061
## Detection Rate : 0.4826
## Detection Prevalence : 0.4948
## Balanced Accuracy : 0.8827
##
## 'Positive' Class : nonspam
##
# Another stratified 75/25 split of the spam data
library(caret); library(kernlab); data(spam)
inTrain <- createDataPartition(y=spam$type,
p=0.75, list=FALSE)
training2 <- spam[inTrain,]
testing <- spam[-inTrain,]
dim(training2)
## [1] 3451 58
# 10-fold cross-validation: return the TRAINING indices of each fold
set.seed(32323)
folds <- createFolds(y=spam$type,k=10,
list=TRUE,returnTrain=TRUE)
sapply(folds,length)
## Fold01 Fold02 Fold03 Fold04 Fold05 Fold06 Fold07 Fold08 Fold09 Fold10
## 4141 4140 4141 4142 4140 4142 4141 4141 4140 4141
folds[[1]][1:10]
## [1] 1 2 3 4 5 6 7 8 9 10
# Same folds, but return the TEST indices instead
set.seed(32323)
folds <- createFolds(y=spam$type,k=10,
list=TRUE,returnTrain=FALSE)
sapply(folds,length)
## Fold01 Fold02 Fold03 Fold04 Fold05 Fold06 Fold07 Fold08 Fold09 Fold10
## 460 461 460 459 461 459 460 460 461 460
folds[[1]][1:10]
## [1] 24 27 32 40 41 43 55 58 63 68
# Bootstrap resampling: full-size samples drawn with replacement
# (note the repeated indices in the first resample below)
set.seed(32323)
folds <- createResample(y=spam$type,times=10,
list=TRUE)
sapply(folds,length)
## Resample01 Resample02 Resample03 Resample04 Resample05 Resample06
## 4601 4601 4601 4601 4601 4601
## Resample07 Resample08 Resample09 Resample10
## 4601 4601 4601 4601
folds[[1]][1:10]
## [1] 1 2 3 3 3 5 5 7 8 12
# Time slices for time-series CV: 20 observations to train, 10 to forecast
set.seed(32323)
tme <- 1:1000
folds <- createTimeSlices(y=tme,initialWindow=20,
horizon=10)
names(folds)
## [1] "train" "test"
folds$train[[1]]
## [1] 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20
folds$test[[1]]
## [1] 21 22 23 24 25 26 27 28 29 30
# Fit a GLM with caret's default resampling (bootstrap, 25 reps)
library(caret); library(kernlab); data(spam)
inTrain <- createDataPartition(y=spam$type,
p=0.75, list=FALSE)
training3 <- spam[inTrain,]
testing <- spam[-inTrain,]
modelFit <- train(type ~.,data=training3, method="glm")
modelFit
## Generalized Linear Model
##
## 3451 samples
## 57 predictors
## 2 classes: 'nonspam', 'spam'
##
## No pre-processing
## Resampling: Bootstrapped (25 reps)
## Summary of sample sizes: 3451, 3451, 3451, 3451, 3451, 3451, ...
## Resampling results:
##
## Accuracy Kappa
## 0.9167223 0.8245611
##
##
# Inspect the default arguments of caret's train() workhorse
args(train.default)
## function (x, y, method = "rf", preProcess = NULL, ..., weights = NULL,
## metric = ifelse(is.factor(y), "Accuracy", "RMSE"), maximize = ifelse(metric %in%
## c("RMSE", "logLoss"), FALSE, TRUE), trControl = trainControl(),
## tuneGrid = NULL, tuneLength = 3)
## NULL
Continuous outcomes: * RMSE = Root mean squared error * RSquared = \(R^2\) from regression models
Categorical outcomes: * Accuracy = Fraction correct * Kappa = A measure of concordance
# trainControl() configures the resampling method, reps, seeds, etc.
args(trainControl)
## function (method = "boot", number = ifelse(grepl("cv", method),
## 10, 25), repeats = ifelse(grepl("cv", method), 1, number),
## p = 0.75, search = "grid", initialWindow = NULL, horizon = 1,
## fixedWindow = TRUE, verboseIter = FALSE, returnData = TRUE,
## returnResamp = "final", savePredictions = FALSE, classProbs = FALSE,
## summaryFunction = defaultSummary, selectionFunction = "best",
## preProcOptions = list(thresh = 0.95, ICAcomp = 3, k = 5),
## sampling = NULL, index = NULL, indexOut = NULL, indexFinal = NULL,
## timingSamps = 0, predictionBounds = rep(FALSE, 2), seeds = NA,
## adaptive = list(min = 5, alpha = 0.05, method = "gls", complete = TRUE),
## trim = FALSE, allowParallel = TRUE)
## NULL
# Setting a seed makes the bootstrap resampling reproducible
set.seed(1235)
modelFit2 <- train(type ~.,data=training3, method="glm")
modelFit2
## Generalized Linear Model
##
## 3451 samples
## 57 predictors
## 2 classes: 'nonspam', 'spam'
##
## No pre-processing
## Resampling: Bootstrapped (25 reps)
## Summary of sample sizes: 3451, 3451, 3451, 3451, 3451, 3451, ...
## Resampling results:
##
## Accuracy Kappa
## 0.9152739 0.8213541
##
##
# Re-running with the same seed reproduces identical resampling results
set.seed(1235)
modelFit3 <- train(type ~.,data=training3, method="glm")
modelFit3
## Generalized Linear Model
##
## 3451 samples
## 57 predictors
## 2 classes: 'nonspam', 'spam'
##
## No pre-processing
## Resampling: Bootstrapped (25 reps)
## Summary of sample sizes: 3451, 3451, 3451, 3451, 3451, 3451, ...
## Resampling results:
##
## Accuracy Kappa
## 0.9152739 0.8213541
##
##
Image Credit http://www.cahs-media.org/the-high-cost-of-low-wages
Data from: ISLR package from the book: Introduction to statistical learning
# Load the Wage data (ISLR) plus plotting/utility packages
library(ISLR)
library(ggplot2)
library(caret)
library(Hmisc)
library(gridExtra)
data(Wage)
summary(Wage)
## year age sex maritl
## Min. :2003 Min. :18.00 1. Male :3000 1. Never Married: 648
## 1st Qu.:2004 1st Qu.:33.75 2. Female: 0 2. Married :2074
## Median :2006 Median :42.00 3. Widowed : 19
## Mean :2006 Mean :42.41 4. Divorced : 204
## 3rd Qu.:2008 3rd Qu.:51.00 5. Separated : 55
## Max. :2009 Max. :80.00
##
## race education region
## 1. White:2480 1. < HS Grad :268 2. Middle Atlantic :3000
## 2. Black: 293 2. HS Grad :971 1. New England : 0
## 3. Asian: 190 3. Some College :650 3. East North Central: 0
## 4. Other: 37 4. College Grad :685 4. West North Central: 0
## 5. Advanced Degree:426 5. South Atlantic : 0
## 6. East South Central: 0
## (Other) : 0
## jobclass health health_ins logwage
## 1. Industrial :1544 1. <=Good : 858 1. Yes:2083 Min. :3.000
## 2. Information:1456 2. >=Very Good:2142 2. No : 917 1st Qu.:4.447
## Median :4.653
## Mean :4.654
## 3rd Qu.:4.857
## Max. :5.763
##
## wage
## Min. : 20.09
## 1st Qu.: 85.38
## Median :104.92
## Mean :111.70
## 3rd Qu.:128.68
## Max. :318.34
##
# 70/30 train/test split on the wage outcome
inTrain <- createDataPartition(y=Wage$wage,
p=0.7, list=FALSE)
training4 <- Wage[inTrain,]
testing <- Wage[-inTrain,]
dim(training4); dim(testing)
## [1] 2102 12
## [1] 898 12
# Pairwise feature plot of selected predictors against wage
featurePlot(x=training4[,c("age","education","jobclass")],
y = training4$wage,
plot="pairs")
# Scatterplots of age vs wage, optionally colored by factor variables
qplot(age,wage,data=training4)
qplot(age,wage,colour=jobclass,data=training4)
qq <- qplot(age,wage,colour=education,data=training4)
qq + geom_smooth(method='lm',formula=y~x)
# Bin wage into 3 roughly equal-sized groups (Hmisc::cut2)
cutWage <- cut2(training4$wage,g=3)
table(cutWage)
## cutWage
## [ 20.9, 93) [ 93.0,119) [118.9,318]
## 713 715 674
# Boxplots of age by wage group
p1 <- qplot(cutWage,age, data=training4,fill=cutWage,
geom=c("boxplot"))
p1
# Add jittered points and display both versions side by side
library(gridExtra)
p2 <- qplot(cutWage,age, data=training4,fill=cutWage,
geom=c("boxplot","jitter"))
grid.arrange(p1,p2,ncol=2)
# Tabulate wage group against job class
t1 <- table(cutWage,training4$jobclass)
t1
##
## cutWage 1. Industrial 2. Information
## [ 20.9, 93) 446 267
## [ 93.0,119) 358 357
## [118.9,318] 265 409
# Row-wise proportions: higher wage groups skew toward Information jobs
prop.table(t1,1)
##
## cutWage 1. Industrial 2. Information
## [ 20.9, 93) 0.6255259 0.3744741
## [ 93.0,119) 0.5006993 0.4993007
## [118.9,318] 0.3931751 0.6068249
# Wage density by education level
qplot(wage,colour=education,data=training4,geom="density")
# Preprocessing demo: split the spam data again
library(caret); library(RANN); library(kernlab); data(spam)
inTrain <- createDataPartition(y=spam$type,
p=0.75, list=FALSE)
training <- spam[inTrain,]
testing <- spam[-inTrain,]
# capitalAve is highly skewed with a large standard deviation
hist(training$capitalAve,main="",xlab="ave. capital run length")
mean(training$capitalAve)
## [1] 4.716994
sd(training$capitalAve)
## [1] 26.82555
# Standardize manually: subtract the mean, divide by the sd
trainCapAve <- training$capitalAve
trainCapAveS <- (trainCapAve - mean(trainCapAve))/sd(trainCapAve)
mean(trainCapAveS)
## [1] -6.935532e-18
sd(trainCapAveS)
## [1] 1
# The test set must be standardized with the TRAINING mean and sd,
# so its standardized mean/sd are not exactly 0/1
testCapAve <- testing$capitalAve
testCapAveS <- (testCapAve - mean(trainCapAve))/sd(trainCapAve)
mean(testCapAveS)
## [1] 0.07077199
sd(testCapAveS)
## [1] 1.610786
# Same standardization via caret::preProcess (column 58 is the outcome)
preObj <- preProcess(training[,-58],method=c("center","scale"))
trainCapAveS <- predict(preObj,training[,-58])$capitalAve
mean(trainCapAveS)
## [1] -6.935532e-18
sd(trainCapAveS)
## [1] 1
# Apply the training-set preprocessing object to the test set
testCapAveS <- predict(preObj,testing[,-58])$capitalAve
mean(testCapAveS)
## [1] 0.07077199
sd(testCapAveS)
## [1] 1.610786
# Preprocessing can also be passed directly to train()
set.seed(32343)
modelFit <- train(type ~.,data=training,
preProcess=c("center","scale"),method="glm")
modelFit
## Generalized Linear Model
##
## 3451 samples
## 57 predictors
## 2 classes: 'nonspam', 'spam'
##
## Pre-processing: centered (57), scaled (57)
## Resampling: Bootstrapped (25 reps)
## Summary of sample sizes: 3451, 3451, 3451, 3451, 3451, 3451, ...
## Resampling results:
##
## Accuracy Kappa
## 0.9208656 0.8332911
##
##
# Box-Cox transform to make skewed continuous data more normal
preObj <- preProcess(training[,-58],method=c("BoxCox"))
trainCapAveS <- predict(preObj,training[,-58])$capitalAve
par(mfrow=c(1,2)); hist(trainCapAveS); qqnorm(trainCapAveS)
set.seed(13343)
# Make some values NA
training$capAve <- training$capitalAve
selectNA <- rbinom(dim(training)[1],size=1,prob=0.05)==1
training$capAve[selectNA] <- NA
# Impute and standardize
preObj <- preProcess(training[,-58],method="knnImpute")
capAve <- predict(preObj,training[,-58])$capAve
# Standardize true values
capAveTruth <- training$capitalAve
capAveTruth <- (capAveTruth-mean(capAveTruth))/sd(capAveTruth)
# Imputed values are close to the truth: differences are near zero
quantile(capAve - capAveTruth)
## 0% 25% 50% 75% 100%
## -1.1884293998 -0.0008836469 0.0005853112 0.0012905270 1.0145601207
quantile((capAve - capAveTruth)[selectNA])
## 0% 25% 50% 75% 100%
## -1.188429400 -0.011925252 0.003908074 0.025463933 1.014560121
quantile((capAve - capAveTruth)[!selectNA])
## 0% 25% 50% 75% 100%
## -0.9757406838 -0.0008010270 0.0005776479 0.0012414810 0.0018085649
Level 1: From raw data to covariate
Level 2: Transforming tidy covariates
# Level 2 covariate creation: add a squared term as a new feature
library(kernlab);data(spam)
spam$capitalAveSq <- spam$capitalAve^2
# Split the Wage data 70/30 for the covariate-creation examples
library(ISLR); library(caret); data(Wage);
inTrain <- createDataPartition(y=Wage$wage,
p=0.7, list=FALSE)
training <- Wage[inTrain,]; testing <- Wage[-inTrain,]
Basic idea - convert factor variables to indicator variables
# Job class is a two-level factor
table(training$jobclass)
##
## 1. Industrial 2. Information
## 1051 1051
# dummyVars converts the factor into 0/1 indicator columns
dummies <- dummyVars(wage ~ jobclass,data=training)
head(predict(dummies,newdata=training))
## jobclass.1. Industrial jobclass.2. Information
## 86582 0 1
## 161300 1 0
## 155159 0 1
## 11443 0 1
## 376662 0 1
## 450601 1 0
# Flag near-zero-variance predictors (candidates for removal)
nsv <- nearZeroVar(training,saveMetrics=TRUE)
nsv
## freqRatio percentUnique zeroVar nzv
## year 1.037356 0.33301618 FALSE FALSE
## age 1.027027 2.85442436 FALSE FALSE
## sex 0.000000 0.04757374 TRUE TRUE
## maritl 3.272931 0.23786870 FALSE FALSE
## race 8.938776 0.19029496 FALSE FALSE
## education 1.389002 0.23786870 FALSE FALSE
## region 0.000000 0.04757374 TRUE TRUE
## jobclass 1.000000 0.09514748 FALSE FALSE
## health 2.468647 0.09514748 FALSE FALSE
## health_ins 2.352472 0.09514748 FALSE FALSE
## logwage 1.061728 19.17221694 FALSE FALSE
## wage 1.061728 19.17221694 FALSE FALSE
# B-spline basis with 3 degrees of freedom for age,
# allowing a curvy (nonlinear) relationship with wage
library(splines)
bsBasis <- bs(training$age,df=3)
head(bsBasis,10)
## 1 2 3
## [1,] 0.2368501 0.02537679 0.000906314
## [2,] 0.4163380 0.32117502 0.082587862
## [3,] 0.4308138 0.29109043 0.065560908
## [4,] 0.3625256 0.38669397 0.137491189
## [5,] 0.3063341 0.42415495 0.195763821
## [6,] 0.4241549 0.30633413 0.073747105
## [7,] 0.3776308 0.09063140 0.007250512
## [8,] 0.4443582 0.22759810 0.038858212
## [9,] 0.4422183 0.19539878 0.028779665
## [10,] 0.3625256 0.38669397 0.137491189
See also: ns(),poly()
# Fit wage on the spline basis and overlay fitted values on the scatterplot
lm1 <- lm(wage ~ bsBasis,data=training)
plot(training$age,training$wage,pch=19,cex=0.5)
points(training$age,predict(lm1,newdata=training),col="red",pch=19,cex=0.5)
# For the test set, predict from the SAME basis built on the training data
predict(bsBasis,age=testing$age)
## 1 2 3
## [1,] 0.236850055 0.0253767916 9.063140e-04
## [2,] 0.416337988 0.3211750193 8.258786e-02
## [3,] 0.430813836 0.2910904300 6.556091e-02
## [4,] 0.362525595 0.3866939680 1.374912e-01
## [5,] 0.306334128 0.4241549461 1.957638e-01
## [6,] 0.424154946 0.3063341278 7.374710e-02
## [7,] 0.377630828 0.0906313987 7.250512e-03
## [8,] 0.444358195 0.2275981001 3.885821e-02
## [9,] 0.442218287 0.1953987782 2.877966e-02
## [10,] 0.362525595 0.3866939680 1.374912e-01
## [11,] 0.275519452 0.4362391326 2.302373e-01
## [12,] 0.444093854 0.2114732637 3.356718e-02
## [13,] 0.443086838 0.2436977611 4.467792e-02
## [14,] 0.375000000 0.3750000000 1.250000e-01
## [15,] 0.430813836 0.2910904300 6.556091e-02
## [16,] 0.426168977 0.1482326877 1.718640e-02
## [17,] 0.000000000 0.0000000000 0.000000e+00
## [18,] 0.291090430 0.4308138364 2.125348e-01
## [19,] 0.349346279 0.3975319727 1.507880e-01
## [20,] 0.417093250 0.1331148669 1.416116e-02
## [21,] 0.426168977 0.1482326877 1.718640e-02
## [22,] 0.438655970 0.1794501695 2.447048e-02
## [23,] 0.275519452 0.4362391326 2.302373e-01
## [24,] 0.266544426 0.0339238361 1.439193e-03
## [25,] 0.406028666 0.1184250277 1.151354e-02
## [26,] 0.318229499 0.0540389715 3.058810e-03
## [27,] 0.340371253 0.0654560102 4.195898e-03
## [28,] 0.318229499 0.0540389715 3.058810e-03
## [29,] 0.430813836 0.2910904300 6.556091e-02
## [30,] 0.362525595 0.3866939680 1.374912e-01
## [31,] 0.444358195 0.2275981001 3.885821e-02
## [32,] 0.259696720 0.4403553087 2.488965e-01
## [33,] 0.266544426 0.0339238361 1.439193e-03
## [34,] 0.430813836 0.2910904300 6.556091e-02
## [35,] 0.204487093 0.0179374643 5.244873e-04
## [36,] 0.377630828 0.0906313987 7.250512e-03
## [37,] 0.195398778 0.4422182874 3.336033e-01
## [38,] 0.426168977 0.1482326877 1.718640e-02
## [39,] 0.077678661 0.3601465208 5.565901e-01
## [40,] 0.386693968 0.3625255950 1.132892e-01
## [41,] 0.375000000 0.3750000000 1.250000e-01
## [42,] 0.436239133 0.2755194522 5.800410e-02
## [43,] 0.442218287 0.1953987782 2.877966e-02
## [44,] 0.131453291 0.0066840657 1.132892e-04
## [45,] 0.243697761 0.4430868383 2.685375e-01
## [46,] 0.266544426 0.0339238361 1.439193e-03
## [47,] 0.443086838 0.2436977611 4.467792e-02
## [48,] 0.424154946 0.3063341278 7.374710e-02
## [49,] 0.424154946 0.3063341278 7.374710e-02
## [50,] 0.195398778 0.4422182874 3.336033e-01
## [51,] 0.291090430 0.4308138364 2.125348e-01
## [52,] 0.436239133 0.2755194522 5.800410e-02
## [53,] 0.266544426 0.0339238361 1.439193e-03
## [54,] 0.321175019 0.4163379880 1.798991e-01
## [55,] 0.397531973 0.3493462791 1.023338e-01
## [56,] 0.407438488 0.3355375785 9.210835e-02
## [57,] 0.426168977 0.1482326877 1.718640e-02
## [58,] 0.169380014 0.0116813803 2.685375e-04
## [59,] 0.416337988 0.3211750193 8.258786e-02
## [60,] 0.179450170 0.4386559699 3.574234e-01
## [61,] 0.306334128 0.4241549461 1.957638e-01
## [62,] 0.426168977 0.1482326877 1.718640e-02
## [63,] 0.362525595 0.3866939680 1.374912e-01
## [64,] 0.407438488 0.3355375785 9.210835e-02
## [65,] 0.440355309 0.2596967205 5.105149e-02
## [66,] 0.444093854 0.2114732637 3.356718e-02
## [67,] 0.433331375 0.1637029640 2.061445e-02
## [68,] 0.118425028 0.4060286664 4.640328e-01
## [69,] 0.442218287 0.1953987782 2.877966e-02
## [70,] 0.444358195 0.2275981001 3.885821e-02
## [71,] 0.436239133 0.2755194522 5.800410e-02
## [72,] 0.349346279 0.3975319727 1.507880e-01
## [73,] 0.444093854 0.2114732637 3.356718e-02
## [74,] 0.375000000 0.3750000000 1.250000e-01
## [75,] 0.436239133 0.2755194522 5.800410e-02
## [76,] 0.430813836 0.2910904300 6.556091e-02
## [77,] 0.227598100 0.4443581954 2.891855e-01
## [78,] 0.259696720 0.4403553087 2.488965e-01
## [79,] 0.266544426 0.0339238361 1.439193e-03
## [80,] 0.375000000 0.3750000000 1.250000e-01
## [81,] 0.444093854 0.2114732637 3.356718e-02
## [82,] 0.195398778 0.4422182874 3.336033e-01
## [83,] 0.335537578 0.4074384881 1.649156e-01
## [84,] 0.211473264 0.4440938538 3.108657e-01
## [85,] 0.407438488 0.3355375785 9.210835e-02
## [86,] 0.131453291 0.0066840657 1.132892e-04
## [87,] 0.195398778 0.4422182874 3.336033e-01
## [88,] 0.406028666 0.1184250277 1.151354e-02
## [89,] 0.243697761 0.4430868383 2.685375e-01
## [90,] 0.406028666 0.1184250277 1.151354e-02
## [91,] 0.169380014 0.0116813803 2.685375e-04
## [92,] 0.349346279 0.3975319727 1.507880e-01
## [93,] 0.424154946 0.3063341278 7.374710e-02
## [94,] 0.211473264 0.4440938538 3.108657e-01
## [95,] 0.443086838 0.2436977611 4.467792e-02
## [96,] 0.433331375 0.1637029640 2.061445e-02
## [97,] 0.433331375 0.1637029640 2.061445e-02
## [98,] 0.211473264 0.4440938538 3.108657e-01
## [99,] 0.444093854 0.2114732637 3.356718e-02
## [100,] 0.321175019 0.4163379880 1.798991e-01
## [101,] 0.259696720 0.4403553087 2.488965e-01
## [102,] 0.148232688 0.4261689772 4.084119e-01
## [103,] 0.433331375 0.1637029640 2.061445e-02
## [104,] 0.306334128 0.4241549461 1.957638e-01
## [105,] 0.416337988 0.3211750193 8.258786e-02
## [106,] 0.243697761 0.4430868383 2.685375e-01
## [107,] 0.386693968 0.3625255950 1.132892e-01
## [108,] 0.407438488 0.3355375785 9.210835e-02
## [109,] 0.407438488 0.3355375785 9.210835e-02
## [110,] 0.291090430 0.4308138364 2.125348e-01
## [111,] 0.349346279 0.3975319727 1.507880e-01
## [112,] 0.375000000 0.3750000000 1.250000e-01
## [113,] 0.426168977 0.1482326877 1.718640e-02
## [114,] 0.321175019 0.4163379880 1.798991e-01
## [115,] 0.443086838 0.2436977611 4.467792e-02
## [116,] 0.362525595 0.3866939680 1.374912e-01
## [117,] 0.444358195 0.2275981001 3.885821e-02
## [118,] 0.335537578 0.4074384881 1.649156e-01
## [119,] 0.362525595 0.3866939680 1.374912e-01
## [120,] 0.386693968 0.3625255950 1.132892e-01
## [121,] 0.397531973 0.3493462791 1.023338e-01
## [122,] 0.444358195 0.2275981001 3.885821e-02
## [123,] 0.424154946 0.3063341278 7.374710e-02
## [124,] 0.442218287 0.1953987782 2.877966e-02
## [125,] 0.335537578 0.4074384881 1.649156e-01
## [126,] 0.293645732 0.0435030714 2.148300e-03
## [127,] 0.392899701 0.1042386963 9.218388e-03
## [128,] 0.243697761 0.4430868383 2.685375e-01
## [129,] 0.377630828 0.0906313987 7.250512e-03
## [130,] 0.318229499 0.0540389715 3.058810e-03
## [131,] 0.443086838 0.2436977611 4.467792e-02
## [132,] 0.291090430 0.4308138364 2.125348e-01
## [133,] 0.433331375 0.1637029640 2.061445e-02
## [134,] 0.360146521 0.0776786613 5.584740e-03
## [135,] 0.266544426 0.0339238361 1.439193e-03
## [136,] 0.443086838 0.2436977611 4.467792e-02
## [137,] 0.318229499 0.0540389715 3.058810e-03
## [138,] 0.375000000 0.3750000000 1.250000e-01
## [139,] 0.169380014 0.0116813803 2.685375e-04
## [140,] 0.375000000 0.3750000000 1.250000e-01
## [141,] 0.266544426 0.0339238361 1.439193e-03
## [142,] 0.360146521 0.0776786613 5.584740e-03
## [143,] 0.442218287 0.1953987782 2.877966e-02
## [144,] 0.433331375 0.1637029640 2.061445e-02
## [145,] 0.243697761 0.4430868383 2.685375e-01
## [146,] 0.444358195 0.2275981001 3.885821e-02
## [147,] 0.440355309 0.2596967205 5.105149e-02
## [148,] 0.442218287 0.1953987782 2.877966e-02
## [149,] 0.179450170 0.4386559699 3.574234e-01
## [150,] 0.318229499 0.0540389715 3.058810e-03
## [151,] 0.442218287 0.1953987782 2.877966e-02
## [152,] 0.275519452 0.4362391326 2.302373e-01
## [153,] 0.438655970 0.1794501695 2.447048e-02
## [154,] 0.204487093 0.0179374643 5.244873e-04
## [155,] 0.407438488 0.3355375785 9.210835e-02
## [156,] 0.293645732 0.0435030714 2.148300e-03
## [157,] 0.430813836 0.2910904300 6.556091e-02
## [158,] 0.438655970 0.1794501695 2.447048e-02
## [159,] 0.306334128 0.4241549461 1.957638e-01
## [160,] 0.443086838 0.2436977611 4.467792e-02
## [161,] 0.426168977 0.1482326877 1.718640e-02
## [162,] 0.430813836 0.2910904300 6.556091e-02
## [163,] 0.227598100 0.4443581954 2.891855e-01
## [164,] 0.211473264 0.4440938538 3.108657e-01
## [165,] 0.375000000 0.3750000000 1.250000e-01
## [166,] 0.416337988 0.3211750193 8.258786e-02
## [167,] 0.426168977 0.1482326877 1.718640e-02
## [168,] 0.169380014 0.0116813803 2.685375e-04
## [169,] 0.443086838 0.2436977611 4.467792e-02
## [170,] 0.440355309 0.2596967205 5.105149e-02
## [171,] 0.438655970 0.1794501695 2.447048e-02
## [172,] 0.397531973 0.3493462791 1.023338e-01
## [173,] 0.433331375 0.1637029640 2.061445e-02
## [174,] 0.443086838 0.2436977611 4.467792e-02
## [175,] 0.259696720 0.4403553087 2.488965e-01
## [176,] 0.033923836 0.2665444262 6.980925e-01
## [177,] 0.360146521 0.0776786613 5.584740e-03
## [178,] 0.377630828 0.0906313987 7.250512e-03
## [179,] 0.360146521 0.0776786613 5.584740e-03
## [180,] 0.438655970 0.1794501695 2.447048e-02
## [181,] 0.444358195 0.2275981001 3.885821e-02
## [182,] 0.386693968 0.3625255950 1.132892e-01
## [183,] 0.416337988 0.3211750193 8.258786e-02
## [184,] 0.362525595 0.3866939680 1.374912e-01
## [185,] 0.243697761 0.4430868383 2.685375e-01
## [186,] 0.386693968 0.3625255950 1.132892e-01
## [187,] 0.440355309 0.2596967205 5.105149e-02
## [188,] 0.318229499 0.0540389715 3.058810e-03
## [189,] 0.424154946 0.3063341278 7.374710e-02
## [190,] 0.406028666 0.1184250277 1.151354e-02
## [191,] 0.407438488 0.3355375785 9.210835e-02
## [192,] 0.169380014 0.0116813803 2.685375e-04
## [193,] 0.321175019 0.4163379880 1.798991e-01
## [194,] 0.426168977 0.1482326877 1.718640e-02
## [195,] 0.444093854 0.2114732637 3.356718e-02
## [196,] 0.266544426 0.0339238361 1.439193e-03
## [197,] 0.360146521 0.0776786613 5.584740e-03
## [198,] 0.340371253 0.0654560102 4.195898e-03
## [199,] 0.291090430 0.4308138364 2.125348e-01
## [200,] 0.275519452 0.4362391326 2.302373e-01
## [201,] 0.195398778 0.4422182874 3.336033e-01
## [202,] 0.397531973 0.3493462791 1.023338e-01
## [203,] 0.335537578 0.4074384881 1.649156e-01
## [204,] 0.417093250 0.1331148669 1.416116e-02
## [205,] 0.243697761 0.4430868383 2.685375e-01
## [206,] 0.318229499 0.0540389715 3.058810e-03
## [207,] 0.335537578 0.4074384881 1.649156e-01
## [208,] 0.416337988 0.3211750193 8.258786e-02
## [209,] 0.169380014 0.0116813803 2.685375e-04
## [210,] 0.266544426 0.0339238361 1.439193e-03
## [211,] 0.438655970 0.1794501695 2.447048e-02
## [212,] 0.392899701 0.1042386963 9.218388e-03
## [213,] 0.335537578 0.4074384881 1.649156e-01
## [214,] 0.407438488 0.3355375785 9.210835e-02
## [215,] 0.416337988 0.3211750193 8.258786e-02
## [216,] 0.443086838 0.2436977611 4.467792e-02
## [217,] 0.436239133 0.2755194522 5.800410e-02
## [218,] 0.440355309 0.2596967205 5.105149e-02
## [219,] 0.266544426 0.0339238361 1.439193e-03
## [220,] 0.236850055 0.0253767916 9.063140e-04
## [221,] 0.349346279 0.3975319727 1.507880e-01
## [222,] 0.440355309 0.2596967205 5.105149e-02
## [223,] 0.377630828 0.0906313987 7.250512e-03
## [224,] 0.291090430 0.4308138364 2.125348e-01
## [225,] 0.204487093 0.0179374643 5.244873e-04
## [226,] 0.211473264 0.4440938538 3.108657e-01
## [227,] 0.443086838 0.2436977611 4.467792e-02
## [228,] 0.000000000 0.0000000000 1.000000e+00
## [229,] 0.443086838 0.2436977611 4.467792e-02
## [230,] 0.433331375 0.1637029640 2.061445e-02
## [231,] 0.291090430 0.4308138364 2.125348e-01
## [232,] 0.236850055 0.0253767916 9.063140e-04
## [233,] 0.444358195 0.2275981001 3.885821e-02
## [234,] 0.377630828 0.0906313987 7.250512e-03
## [235,] 0.090631399 0.3776308281 5.244873e-01
## [236,] 0.306334128 0.4241549461 1.957638e-01
## [237,] 0.318229499 0.0540389715 3.058810e-03
## [238,] 0.426168977 0.1482326877 1.718640e-02
## [239,] 0.321175019 0.4163379880 1.798991e-01
## [240,] 0.227598100 0.4443581954 2.891855e-01
## [241,] 0.416337988 0.3211750193 8.258786e-02
## [242,] 0.430813836 0.2910904300 6.556091e-02
## [243,] 0.377630828 0.0906313987 7.250512e-03
## [244,] 0.436239133 0.2755194522 5.800410e-02
## [245,] 0.204487093 0.0179374643 5.244873e-04
## [246,] 0.243697761 0.4430868383 2.685375e-01
## [247,] 0.417093250 0.1331148669 1.416116e-02
## [248,] 0.275519452 0.4362391326 2.302373e-01
## [249,] 0.442218287 0.1953987782 2.877966e-02
## [250,] 0.417093250 0.1331148669 1.416116e-02
## [251,] 0.362525595 0.3866939680 1.374912e-01
## [252,] 0.430813836 0.2910904300 6.556091e-02
## [253,] 0.321175019 0.4163379880 1.798991e-01
## [254,] 0.442218287 0.1953987782 2.877966e-02
## [255,] 0.090631399 0.0030210466 3.356718e-05
## [256,] 0.293645732 0.0435030714 2.148300e-03
## [257,] 0.360146521 0.0776786613 5.584740e-03
## [258,] 0.259696720 0.4403553087 2.488965e-01
## [259,] 0.397531973 0.3493462791 1.023338e-01
## [260,] 0.444093854 0.2114732637 3.356718e-02
## [261,] 0.204487093 0.0179374643 5.244873e-04
## [262,] 0.392899701 0.1042386963 9.218388e-03
## [263,] 0.430813836 0.2910904300 6.556091e-02
## [264,] 0.417093250 0.1331148669 1.416116e-02
## [265,] 0.386693968 0.3625255950 1.132892e-01
## [266,] 0.377630828 0.0906313987 7.250512e-03
## [267,] 0.424154946 0.3063341278 7.374710e-02
## [268,] 0.444093854 0.2114732637 3.356718e-02
## [269,] 0.397531973 0.3493462791 1.023338e-01
## [270,] 0.340371253 0.0654560102 4.195898e-03
## [271,] 0.204487093 0.0179374643 5.244873e-04
## [272,] 0.318229499 0.0540389715 3.058810e-03
## [273,] 0.417093250 0.1331148669 1.416116e-02
## [274,] 0.375000000 0.3750000000 1.250000e-01
## [275,] 0.318229499 0.0540389715 3.058810e-03
## [276,] 0.386693968 0.3625255950 1.132892e-01
## [277,] 0.444093854 0.2114732637 3.356718e-02
## [278,] 0.243697761 0.4430868383 2.685375e-01
## [279,] 0.407438488 0.3355375785 9.210835e-02
## [280,] 0.321175019 0.4163379880 1.798991e-01
## [281,] 0.436239133 0.2755194522 5.800410e-02
## [282,] 0.443086838 0.2436977611 4.467792e-02
## [283,] 0.433331375 0.1637029640 2.061445e-02
## [284,] 0.362525595 0.3866939680 1.374912e-01
## [285,] 0.426168977 0.1482326877 1.718640e-02
## [286,] 0.386693968 0.3625255950 1.132892e-01
## [287,] 0.375000000 0.3750000000 1.250000e-01
## [288,] 0.440355309 0.2596967205 5.105149e-02
## [289,] 0.243697761 0.4430868383 2.685375e-01
## [290,] 0.362525595 0.3866939680 1.374912e-01
## [291,] 0.444093854 0.2114732637 3.356718e-02
## [292,] 0.377630828 0.0906313987 7.250512e-03
## [293,] 0.424154946 0.3063341278 7.374710e-02
## [294,] 0.243697761 0.4430868383 2.685375e-01
## [295,] 0.416337988 0.3211750193 8.258786e-02
## [296,] 0.424154946 0.3063341278 7.374710e-02
## [297,] 0.416337988 0.3211750193 8.258786e-02
## [298,] 0.349346279 0.3975319727 1.507880e-01
## [299,] 0.195398778 0.4422182874 3.336033e-01
## [300,] 0.416337988 0.3211750193 8.258786e-02
## [301,] 0.426168977 0.1482326877 1.718640e-02
## [302,] 0.417093250 0.1331148669 1.416116e-02
## [303,] 0.362525595 0.3866939680 1.374912e-01
## [304,] 0.362525595 0.3866939680 1.374912e-01
## [305,] 0.444358195 0.2275981001 3.885821e-02
## [306,] 0.293645732 0.0435030714 2.148300e-03
## [307,] 0.375000000 0.3750000000 1.250000e-01
## [308,] 0.392899701 0.1042386963 9.218388e-03
## [309,] 0.275519452 0.4362391326 2.302373e-01
## [310,] 0.195398778 0.4422182874 3.336033e-01
## [311,] 0.275519452 0.4362391326 2.302373e-01
## [312,] 0.349346279 0.3975319727 1.507880e-01
## [313,] 0.436239133 0.2755194522 5.800410e-02
## [314,] 0.416337988 0.3211750193 8.258786e-02
## [315,] 0.386693968 0.3625255950 1.132892e-01
## [316,] 0.417093250 0.1331148669 1.416116e-02
## [317,] 0.392899701 0.1042386963 9.218388e-03
## [318,] 0.386693968 0.3625255950 1.132892e-01
## [319,] 0.433331375 0.1637029640 2.061445e-02
## [320,] 0.397531973 0.3493462791 1.023338e-01
## [321,] 0.416337988 0.3211750193 8.258786e-02
## [322,] 0.407438488 0.3355375785 9.210835e-02
## [323,] 0.397531973 0.3493462791 1.023338e-01
## [324,] 0.375000000 0.3750000000 1.250000e-01
## [325,] 0.438655970 0.1794501695 2.447048e-02
## [326,] 0.349346279 0.3975319727 1.507880e-01
## [327,] 0.407438488 0.3355375785 9.210835e-02
## [328,] 0.430813836 0.2910904300 6.556091e-02
## [329,] 0.424154946 0.3063341278 7.374710e-02
## [330,] 0.195398778 0.4422182874 3.336033e-01
## [331,] 0.442218287 0.1953987782 2.877966e-02
## [332,] 0.444093854 0.2114732637 3.356718e-02
## [333,] 0.440355309 0.2596967205 5.105149e-02
## [334,] 0.377630828 0.0906313987 7.250512e-03
## [335,] 0.349346279 0.3975319727 1.507880e-01
## [336,] 0.433331375 0.1637029640 2.061445e-02
## [337,] 0.318229499 0.0540389715 3.058810e-03
## [338,] 0.349346279 0.3975319727 1.507880e-01
## [339,] 0.440355309 0.2596967205 5.105149e-02
## [340,] 0.163702964 0.4333313752 3.823512e-01
## [341,] 0.340371253 0.0654560102 4.195898e-03
## [342,] 0.362525595 0.3866939680 1.374912e-01
## [343,] 0.440355309 0.2596967205 5.105149e-02
## [344,] 0.204487093 0.0179374643 5.244873e-04
## [345,] 0.416337988 0.3211750193 8.258786e-02
## [346,] 0.163702964 0.4333313752 3.823512e-01
## [347,] 0.227598100 0.4443581954 2.891855e-01
## [348,] 0.377630828 0.0906313987 7.250512e-03
## [349,] 0.416337988 0.3211750193 8.258786e-02
## [350,] 0.335537578 0.4074384881 1.649156e-01
## [351,] 0.306334128 0.4241549461 1.957638e-01
## [352,] 0.377630828 0.0906313987 7.250512e-03
## [353,] 0.397531973 0.3493462791 1.023338e-01
## [354,] 0.397531973 0.3493462791 1.023338e-01
## [355,] 0.444358195 0.2275981001 3.885821e-02
## [356,] 0.362525595 0.3866939680 1.374912e-01
## [357,] 0.397531973 0.3493462791 1.023338e-01
## [358,] 0.416337988 0.3211750193 8.258786e-02
## [359,] 0.424154946 0.3063341278 7.374710e-02
## [360,] 0.436239133 0.2755194522 5.800410e-02
## [361,] 0.275519452 0.4362391326 2.302373e-01
## [362,] 0.362525595 0.3866939680 1.374912e-01
## [363,] 0.321175019 0.4163379880 1.798991e-01
## [364,] 0.444093854 0.2114732637 3.356718e-02
## [365,] 0.275519452 0.4362391326 2.302373e-01
## [366,] 0.362525595 0.3866939680 1.374912e-01
## [367,] 0.375000000 0.3750000000 1.250000e-01
## [368,] 0.436239133 0.2755194522 5.800410e-02
## [369,] 0.362525595 0.3866939680 1.374912e-01
## [370,] 0.321175019 0.4163379880 1.798991e-01
## [371,] 0.340371253 0.0654560102 4.195898e-03
## [372,] 0.416337988 0.3211750193 8.258786e-02
## [373,] 0.236850055 0.0253767916 9.063140e-04
## [374,] 0.266544426 0.0339238361 1.439193e-03
## [375,] 0.397531973 0.3493462791 1.023338e-01
## [376,] 0.444093854 0.2114732637 3.356718e-02
## [377,] 0.417093250 0.1331148669 1.416116e-02
## [378,] 0.444358195 0.2275981001 3.885821e-02
## [379,] 0.407438488 0.3355375785 9.210835e-02
## [380,] 0.195398778 0.4422182874 3.336033e-01
## [381,] 0.406028666 0.1184250277 1.151354e-02
## [382,] 0.195398778 0.4422182874 3.336033e-01
## [383,] 0.416337988 0.3211750193 8.258786e-02
## [384,] 0.243697761 0.4430868383 2.685375e-01
## [385,] 0.266544426 0.0339238361 1.439193e-03
## [386,] 0.426168977 0.1482326877 1.718640e-02
## [387,] 0.424154946 0.3063341278 7.374710e-02
## [388,] 0.148232688 0.4261689772 4.084119e-01
## [389,] 0.306334128 0.4241549461 1.957638e-01
## [390,] 0.436239133 0.2755194522 5.800410e-02
## [391,] 0.392899701 0.1042386963 9.218388e-03
## [392,] 0.266544426 0.0339238361 1.439193e-03
## [393,] 0.349346279 0.3975319727 1.507880e-01
## [394,] 0.340371253 0.0654560102 4.195898e-03
## [395,] 0.321175019 0.4163379880 1.798991e-01
## [396,] 0.407438488 0.3355375785 9.210835e-02
## [397,] 0.444093854 0.2114732637 3.356718e-02
## [398,] 0.444358195 0.2275981001 3.885821e-02
## [399,] 0.442218287 0.1953987782 2.877966e-02
## [400,] 0.227598100 0.4443581954 2.891855e-01
## [401,] 0.417093250 0.1331148669 1.416116e-02
## [402,] 0.204487093 0.0179374643 5.244873e-04
## [403,] 0.442218287 0.1953987782 2.877966e-02
## [404,] 0.318229499 0.0540389715 3.058810e-03
## [405,] 0.397531973 0.3493462791 1.023338e-01
## [406,] 0.335537578 0.4074384881 1.649156e-01
## [407,] 0.442218287 0.1953987782 2.877966e-02
## [408,] 0.426168977 0.1482326877 1.718640e-02
## [409,] 0.349346279 0.3975319727 1.507880e-01
## [410,] 0.362525595 0.3866939680 1.374912e-01
## [411,] 0.306334128 0.4241549461 1.957638e-01
## [412,] 0.362525595 0.3866939680 1.374912e-01
## [413,] 0.406028666 0.1184250277 1.151354e-02
## [414,] 0.442218287 0.1953987782 2.877966e-02
## [415,] 0.046838810 0.0007678494 4.195898e-06
## [416,] 0.406028666 0.1184250277 1.151354e-02
## [417,] 0.436239133 0.2755194522 5.800410e-02
## [418,] 0.430813836 0.2910904300 6.556091e-02
## [419,] 0.424154946 0.3063341278 7.374710e-02
## [420,] 0.443086838 0.2436977611 4.467792e-02
## [421,] 0.430813836 0.2910904300 6.556091e-02
## [422,] 0.406028666 0.1184250277 1.151354e-02
## [423,] 0.195398778 0.4422182874 3.336033e-01
## [424,] 0.397531973 0.3493462791 1.023338e-01
## [425,] 0.291090430 0.4308138364 2.125348e-01
## [426,] 0.335537578 0.4074384881 1.649156e-01
## [427,] 0.318229499 0.0540389715 3.058810e-03
## [428,] 0.169380014 0.0116813803 2.685375e-04
## [429,] 0.436239133 0.2755194522 5.800410e-02
## [430,] 0.392899701 0.1042386963 9.218388e-03
## [431,] 0.227598100 0.4443581954 2.891855e-01
## [432,] 0.438655970 0.1794501695 2.447048e-02
## [433,] 0.406028666 0.1184250277 1.151354e-02
## [434,] 0.406028666 0.1184250277 1.151354e-02
## [435,] 0.266544426 0.0339238361 1.439193e-03
## [436,] 0.430813836 0.2910904300 6.556091e-02
## [437,] 0.424154946 0.3063341278 7.374710e-02
## [438,] 0.259696720 0.4403553087 2.488965e-01
## [439,] 0.440355309 0.2596967205 5.105149e-02
## [440,] 0.444093854 0.2114732637 3.356718e-02
## [441,] 0.243697761 0.4430868383 2.685375e-01
## [442,] 0.227598100 0.4443581954 2.891855e-01
## [443,] 0.444358195 0.2275981001 3.885821e-02
## [444,] 0.424154946 0.3063341278 7.374710e-02
## [445,] 0.065456010 0.3403712531 5.899768e-01
## [446,] 0.318229499 0.0540389715 3.058810e-03
## [447,] 0.397531973 0.3493462791 1.023338e-01
## [448,] 0.360146521 0.0776786613 5.584740e-03
## [449,] 0.436239133 0.2755194522 5.800410e-02
## [450,] 0.349346279 0.3975319727 1.507880e-01
## [451,] 0.444358195 0.2275981001 3.885821e-02
## [452,] 0.204487093 0.0179374643 5.244873e-04
## [453,] 0.392899701 0.1042386963 9.218388e-03
## [454,] 0.227598100 0.4443581954 2.891855e-01
## [455,] 0.436239133 0.2755194522 5.800410e-02
## [456,] 0.433331375 0.1637029640 2.061445e-02
## [457,] 0.444093854 0.2114732637 3.356718e-02
## [458,] 0.416337988 0.3211750193 8.258786e-02
## [459,] 0.243697761 0.4430868383 2.685375e-01
## [460,] 0.293645732 0.0435030714 2.148300e-03
## [461,] 0.377630828 0.0906313987 7.250512e-03
## [462,] 0.306334128 0.4241549461 1.957638e-01
## [463,] 0.335537578 0.4074384881 1.649156e-01
## [464,] 0.033923836 0.2665444262 6.980925e-01
## [465,] 0.133114867 0.4170932496 4.356307e-01
## [466,] 0.321175019 0.4163379880 1.798991e-01
## [467,] 0.335537578 0.4074384881 1.649156e-01
## [468,] 0.259696720 0.4403553087 2.488965e-01
## [469,] 0.406028666 0.1184250277 1.151354e-02
## [470,] 0.349346279 0.3975319727 1.507880e-01
## [471,] 0.430813836 0.2910904300 6.556091e-02
## [472,] 0.362525595 0.3866939680 1.374912e-01
## [473,] 0.321175019 0.4163379880 1.798991e-01
## [474,] 0.306334128 0.4241549461 1.957638e-01
## [475,] 0.443086838 0.2436977611 4.467792e-02
## [476,] 0.377630828 0.0906313987 7.250512e-03
## [477,] 0.416337988 0.3211750193 8.258786e-02
## [478,] 0.291090430 0.4308138364 2.125348e-01
## [479,] 0.416337988 0.3211750193 8.258786e-02
## [480,] 0.424154946 0.3063341278 7.374710e-02
## [481,] 0.442218287 0.1953987782 2.877966e-02
## [482,] 0.440355309 0.2596967205 5.105149e-02
## [483,] 0.335537578 0.4074384881 1.649156e-01
## [484,] 0.291090430 0.4308138364 2.125348e-01
## [485,] 0.430813836 0.2910904300 6.556091e-02
## [486,] 0.318229499 0.0540389715 3.058810e-03
## [487,] 0.430813836 0.2910904300 6.556091e-02
## [488,] 0.407438488 0.3355375785 9.210835e-02
## [489,] 0.386693968 0.3625255950 1.132892e-01
## [490,] 0.360146521 0.0776786613 5.584740e-03
## [491,] 0.236850055 0.0253767916 9.063140e-04
## [492,] 0.362525595 0.3866939680 1.374912e-01
## [493,] 0.236850055 0.0253767916 9.063140e-04
## [494,] 0.436239133 0.2755194522 5.800410e-02
## [495,] 0.375000000 0.3750000000 1.250000e-01
## [496,] 0.443086838 0.2436977611 4.467792e-02
## [497,] 0.440355309 0.2596967205 5.105149e-02
## [498,] 0.426168977 0.1482326877 1.718640e-02
## [499,] 0.236850055 0.0253767916 9.063140e-04
## [500,] 0.424154946 0.3063341278 7.374710e-02
## [501,] 0.266544426 0.0339238361 1.439193e-03
## [502,] 0.443086838 0.2436977611 4.467792e-02
## [503,] 0.266544426 0.0339238361 1.439193e-03
## [504,] 0.424154946 0.3063341278 7.374710e-02
## [505,] 0.243697761 0.4430868383 2.685375e-01
## [506,] 0.335537578 0.4074384881 1.649156e-01
## [507,] 0.211473264 0.4440938538 3.108657e-01
## [508,] 0.349346279 0.3975319727 1.507880e-01
## [509,] 0.416337988 0.3211750193 8.258786e-02
## [510,] 0.430813836 0.2910904300 6.556091e-02
## [511,] 0.416337988 0.3211750193 8.258786e-02
## [512,] 0.443086838 0.2436977611 4.467792e-02
## [513,] 0.349346279 0.3975319727 1.507880e-01
## [514,] 0.335537578 0.4074384881 1.649156e-01
## [515,] 0.392899701 0.1042386963 9.218388e-03
## [516,] 0.443086838 0.2436977611 4.467792e-02
## [517,] 0.293645732 0.0435030714 2.148300e-03
## [518,] 0.375000000 0.3750000000 1.250000e-01
## [519,] 0.444093854 0.2114732637 3.356718e-02
## [520,] 0.362525595 0.3866939680 1.374912e-01
## [521,] 0.360146521 0.0776786613 5.584740e-03
## [522,] 0.417093250 0.1331148669 1.416116e-02
## [523,] 0.179450170 0.4386559699 3.574234e-01
## [524,] 0.416337988 0.3211750193 8.258786e-02
## [525,] 0.275519452 0.4362391326 2.302373e-01
## [526,] 0.243697761 0.4430868383 2.685375e-01
## [527,] 0.444358195 0.2275981001 3.885821e-02
## [528,] 0.375000000 0.3750000000 1.250000e-01
## [529,] 0.236850055 0.0253767916 9.063140e-04
## [530,] 0.243697761 0.4430868383 2.685375e-01
## [531,] 0.397531973 0.3493462791 1.023338e-01
## [532,] 0.440355309 0.2596967205 5.105149e-02
## [533,] 0.054038972 0.3182294988 6.246727e-01
## [534,] 0.397531973 0.3493462791 1.023338e-01
## [535,] 0.444093854 0.2114732637 3.356718e-02
## [536,] 0.392899701 0.1042386963 9.218388e-03
## [537,] 0.275519452 0.4362391326 2.302373e-01
## [538,] 0.424154946 0.3063341278 7.374710e-02
## [539,] 0.417093250 0.1331148669 1.416116e-02
## [540,] 0.392899701 0.1042386963 9.218388e-03
## [541,] 0.291090430 0.4308138364 2.125348e-01
## [542,] 0.386693968 0.3625255950 1.132892e-01
## [543,] 0.291090430 0.4308138364 2.125348e-01
## [544,] 0.407438488 0.3355375785 9.210835e-02
## [545,] 0.386693968 0.3625255950 1.132892e-01
## [546,] 0.204487093 0.0179374643 5.244873e-04
## [547,] 0.211473264 0.4440938538 3.108657e-01
## [548,] 0.426168977 0.1482326877 1.718640e-02
## [549,] 0.416337988 0.3211750193 8.258786e-02
## [550,] 0.340371253 0.0654560102 4.195898e-03
## [551,] 0.417093250 0.1331148669 1.416116e-02
## [552,] 0.243697761 0.4430868383 2.685375e-01
## [553,] 0.397531973 0.3493462791 1.023338e-01
## [554,] 0.236850055 0.0253767916 9.063140e-04
## [555,] 0.275519452 0.4362391326 2.302373e-01
## [556,] 0.275519452 0.4362391326 2.302373e-01
## [557,] 0.204487093 0.0179374643 5.244873e-04
## [558,] 0.416337988 0.3211750193 8.258786e-02
## [559,] 0.243697761 0.4430868383 2.685375e-01
## [560,] 0.377630828 0.0906313987 7.250512e-03
## [561,] 0.386693968 0.3625255950 1.132892e-01
## [562,] 0.442218287 0.1953987782 2.877966e-02
## [563,] 0.375000000 0.3750000000 1.250000e-01
## [564,] 0.392899701 0.1042386963 9.218388e-03
## [565,] 0.335537578 0.4074384881 1.649156e-01
## [566,] 0.065456010 0.3403712531 5.899768e-01
## [567,] 0.426168977 0.1482326877 1.718640e-02
## [568,] 0.444093854 0.2114732637 3.356718e-02
## [569,] 0.340371253 0.0654560102 4.195898e-03
## [570,] 0.444093854 0.2114732637 3.356718e-02
## [571,] 0.444358195 0.2275981001 3.885821e-02
## [572,] 0.335537578 0.4074384881 1.649156e-01
## [573,] 0.426168977 0.1482326877 1.718640e-02
## [574,] 0.417093250 0.1331148669 1.416116e-02
## [575,] 0.243697761 0.4430868383 2.685375e-01
## [576,] 0.444093854 0.2114732637 3.356718e-02
## [577,] 0.444093854 0.2114732637 3.356718e-02
## [578,] 0.392899701 0.1042386963 9.218388e-03
## [579,] 0.321175019 0.4163379880 1.798991e-01
## [580,] 0.131453291 0.0066840657 1.132892e-04
## [581,] 0.444093854 0.2114732637 3.356718e-02
## [582,] 0.340371253 0.0654560102 4.195898e-03
## [583,] 0.406028666 0.1184250277 1.151354e-02
## [584,] 0.340371253 0.0654560102 4.195898e-03
## [585,] 0.436239133 0.2755194522 5.800410e-02
## [586,] 0.340371253 0.0654560102 4.195898e-03
## [587,] 0.386693968 0.3625255950 1.132892e-01
## [588,] 0.291090430 0.4308138364 2.125348e-01
## [589,] 0.442218287 0.1953987782 2.877966e-02
## [590,] 0.090631399 0.3776308281 5.244873e-01
## [591,] 0.133114867 0.4170932496 4.356307e-01
## [592,] 0.442218287 0.1953987782 2.877966e-02
## [593,] 0.417093250 0.1331148669 1.416116e-02
## [594,] 0.046838810 0.0007678494 4.195898e-06
## [595,] 0.362525595 0.3866939680 1.374912e-01
## [596,] 0.443086838 0.2436977611 4.467792e-02
## [597,] 0.118425028 0.4060286664 4.640328e-01
## [598,] 0.433331375 0.1637029640 2.061445e-02
## [599,] 0.417093250 0.1331148669 1.416116e-02
## [600,] 0.424154946 0.3063341278 7.374710e-02
## [601,] 0.397531973 0.3493462791 1.023338e-01
## [602,] 0.291090430 0.4308138364 2.125348e-01
## [603,] 0.417093250 0.1331148669 1.416116e-02
## [604,] 0.275519452 0.4362391326 2.302373e-01
## [605,] 0.397531973 0.3493462791 1.023338e-01
## [606,] 0.416337988 0.3211750193 8.258786e-02
## [607,] 0.424154946 0.3063341278 7.374710e-02
## [608,] 0.266544426 0.0339238361 1.439193e-03
## [609,] 0.416337988 0.3211750193 8.258786e-02
## [610,] 0.275519452 0.4362391326 2.302373e-01
## [611,] 0.397531973 0.3493462791 1.023338e-01
## [612,] 0.444358195 0.2275981001 3.885821e-02
## [613,] 0.386693968 0.3625255950 1.132892e-01
## [614,] 0.436239133 0.2755194522 5.800410e-02
## [615,] 0.291090430 0.4308138364 2.125348e-01
## [616,] 0.195398778 0.4422182874 3.336033e-01
## [617,] 0.444358195 0.2275981001 3.885821e-02
## [618,] 0.377630828 0.0906313987 7.250512e-03
## [619,] 0.375000000 0.3750000000 1.250000e-01
## [620,] 0.417093250 0.1331148669 1.416116e-02
## [621,] 0.392899701 0.1042386963 9.218388e-03
## [622,] 0.291090430 0.4308138364 2.125348e-01
## [623,] 0.438655970 0.1794501695 2.447048e-02
## [624,] 0.417093250 0.1331148669 1.416116e-02
## [625,] 0.386693968 0.3625255950 1.132892e-01
## [626,] 0.211473264 0.4440938538 3.108657e-01
## [627,] 0.340371253 0.0654560102 4.195898e-03
## [628,] 0.360146521 0.0776786613 5.584740e-03
## [629,] 0.406028666 0.1184250277 1.151354e-02
## [630,] 0.417093250 0.1331148669 1.416116e-02
## [631,] 0.443086838 0.2436977611 4.467792e-02
## [632,] 0.436239133 0.2755194522 5.800410e-02
## [633,] 0.444358195 0.2275981001 3.885821e-02
## [634,] 0.424154946 0.3063341278 7.374710e-02
## [635,] 0.430813836 0.2910904300 6.556091e-02
## [636,] 0.424154946 0.3063341278 7.374710e-02
## [637,] 0.360146521 0.0776786613 5.584740e-03
## [638,] 0.397531973 0.3493462791 1.023338e-01
## [639,] 0.407438488 0.3355375785 9.210835e-02
## [640,] 0.335537578 0.4074384881 1.649156e-01
## [641,] 0.444093854 0.2114732637 3.356718e-02
## [642,] 0.436239133 0.2755194522 5.800410e-02
## [643,] 0.275519452 0.4362391326 2.302373e-01
## [644,] 0.360146521 0.0776786613 5.584740e-03
## [645,] 0.417093250 0.1331148669 1.416116e-02
## [646,] 0.417093250 0.1331148669 1.416116e-02
## [647,] 0.440355309 0.2596967205 5.105149e-02
## [648,] 0.424154946 0.3063341278 7.374710e-02
## [649,] 0.416337988 0.3211750193 8.258786e-02
## [650,] 0.243697761 0.4430868383 2.685375e-01
## [651,] 0.360146521 0.0776786613 5.584740e-03
## [652,] 0.436239133 0.2755194522 5.800410e-02
## [653,] 0.397531973 0.3493462791 1.023338e-01
## [654,] 0.377630828 0.0906313987 7.250512e-03
## [655,] 0.444358195 0.2275981001 3.885821e-02
## [656,] 0.375000000 0.3750000000 1.250000e-01
## [657,] 0.424154946 0.3063341278 7.374710e-02
## [658,] 0.306334128 0.4241549461 1.957638e-01
## [659,] 0.436239133 0.2755194522 5.800410e-02
## [660,] 0.444358195 0.2275981001 3.885821e-02
## [661,] 0.377630828 0.0906313987 7.250512e-03
## [662,] 0.417093250 0.1331148669 1.416116e-02
## [663,] 0.444093854 0.2114732637 3.356718e-02
## [664,] 0.335537578 0.4074384881 1.649156e-01
## [665,] 0.306334128 0.4241549461 1.957638e-01
## [666,] 0.179450170 0.4386559699 3.574234e-01
## [667,] 0.259696720 0.4403553087 2.488965e-01
## [668,] 0.406028666 0.1184250277 1.151354e-02
## [669,] 0.443086838 0.2436977611 4.467792e-02
## [670,] 0.375000000 0.3750000000 1.250000e-01
## [671,] 0.306334128 0.4241549461 1.957638e-01
## [672,] 0.386693968 0.3625255950 1.132892e-01
## [673,] 0.407438488 0.3355375785 9.210835e-02
## [674,] 0.377630828 0.0906313987 7.250512e-03
## [675,] 0.318229499 0.0540389715 3.058810e-03
## [676,] 0.291090430 0.4308138364 2.125348e-01
## [677,] 0.406028666 0.1184250277 1.151354e-02
## [678,] 0.375000000 0.3750000000 1.250000e-01
## [679,] 0.362525595 0.3866939680 1.374912e-01
## [680,] 0.362525595 0.3866939680 1.374912e-01
## [681,] 0.424154946 0.3063341278 7.374710e-02
## [682,] 0.259696720 0.4403553087 2.488965e-01
## [683,] 0.043503071 0.2936457319 6.607029e-01
## [684,] 0.204487093 0.0179374643 5.244873e-04
## [685,] 0.392899701 0.1042386963 9.218388e-03
## [686,] 0.407438488 0.3355375785 9.210835e-02
## [687,] 0.291090430 0.4308138364 2.125348e-01
## [688,] 0.424154946 0.3063341278 7.374710e-02
## [689,] 0.424154946 0.3063341278 7.374710e-02
## [690,] 0.406028666 0.1184250277 1.151354e-02
## [691,] 0.211473264 0.4440938538 3.108657e-01
## [692,] 0.386693968 0.3625255950 1.132892e-01
## [693,] 0.306334128 0.4241549461 1.957638e-01
## [694,] 0.360146521 0.0776786613 5.584740e-03
## [695,] 0.433331375 0.1637029640 2.061445e-02
## [696,] 0.266544426 0.0339238361 1.439193e-03
## [697,] 0.349346279 0.3975319727 1.507880e-01
## [698,] 0.417093250 0.1331148669 1.416116e-02
## [699,] 0.227598100 0.4443581954 2.891855e-01
## [700,] 0.179450170 0.4386559699 3.574234e-01
## [701,] 0.340371253 0.0654560102 4.195898e-03
## [702,] 0.335537578 0.4074384881 1.649156e-01
## [703,] 0.360146521 0.0776786613 5.584740e-03
## [704,] 0.426168977 0.1482326877 1.718640e-02
## [705,] 0.266544426 0.0339238361 1.439193e-03
## [706,] 0.118425028 0.4060286664 4.640328e-01
## [707,] 0.430813836 0.2910904300 6.556091e-02
## [708,] 0.416337988 0.3211750193 8.258786e-02
## [709,] 0.433331375 0.1637029640 2.061445e-02
## [710,] 0.375000000 0.3750000000 1.250000e-01
## [711,] 0.211473264 0.4440938538 3.108657e-01
## [712,] 0.291090430 0.4308138364 2.125348e-01
## [713,] 0.406028666 0.1184250277 1.151354e-02
## [714,] 0.321175019 0.4163379880 1.798991e-01
## [715,] 0.259696720 0.4403553087 2.488965e-01
## [716,] 0.349346279 0.3975319727 1.507880e-01
## [717,] 0.275519452 0.4362391326 2.302373e-01
## [718,] 0.377630828 0.0906313987 7.250512e-03
## [719,] 0.131453291 0.0066840657 1.132892e-04
## [720,] 0.211473264 0.4440938538 3.108657e-01
## [721,] 0.211473264 0.4440938538 3.108657e-01
## [722,] 0.386693968 0.3625255950 1.132892e-01
## [723,] 0.444358195 0.2275981001 3.885821e-02
## [724,] 0.406028666 0.1184250277 1.151354e-02
## [725,] 0.349346279 0.3975319727 1.507880e-01
## [726,] 0.424154946 0.3063341278 7.374710e-02
## [727,] 0.407438488 0.3355375785 9.210835e-02
## [728,] 0.236850055 0.0253767916 9.063140e-04
## [729,] 0.442218287 0.1953987782 2.877966e-02
## [730,] 0.043503071 0.2936457319 6.607029e-01
## [731,] 0.362525595 0.3866939680 1.374912e-01
## [732,] 0.318229499 0.0540389715 3.058810e-03
## [733,] 0.440355309 0.2596967205 5.105149e-02
## [734,] 0.090631399 0.0030210466 3.356718e-05
## [735,] 0.375000000 0.3750000000 1.250000e-01
## [736,] 0.266544426 0.0339238361 1.439193e-03
## [737,] 0.321175019 0.4163379880 1.798991e-01
## [738,] 0.416337988 0.3211750193 8.258786e-02
## [739,] 0.406028666 0.1184250277 1.151354e-02
## [740,] 0.397531973 0.3493462791 1.023338e-01
## [741,] 0.293645732 0.0435030714 2.148300e-03
## [742,] 0.392899701 0.1042386963 9.218388e-03
## [743,] 0.406028666 0.1184250277 1.151354e-02
## [744,] 0.362525595 0.3866939680 1.374912e-01
## [745,] 0.375000000 0.3750000000 1.250000e-01
## [746,] 0.266544426 0.0339238361 1.439193e-03
## [747,] 0.211473264 0.4440938538 3.108657e-01
## [748,] 0.179450170 0.4386559699 3.574234e-01
## [749,] 0.163702964 0.4333313752 3.823512e-01
## [750,] 0.360146521 0.0776786613 5.584740e-03
## [751,] 0.349346279 0.3975319727 1.507880e-01
## [752,] 0.340371253 0.0654560102 4.195898e-03
## [753,] 0.438655970 0.1794501695 2.447048e-02
## [754,] 0.340371253 0.0654560102 4.195898e-03
## [755,] 0.444093854 0.2114732637 3.356718e-02
## [756,] 0.433331375 0.1637029640 2.061445e-02
## [757,] 0.407438488 0.3355375785 9.210835e-02
## [758,] 0.442218287 0.1953987782 2.877966e-02
## [759,] 0.227598100 0.4443581954 2.891855e-01
## [760,] 0.349346279 0.3975319727 1.507880e-01
## [761,] 0.293645732 0.0435030714 2.148300e-03
## [762,] 0.406028666 0.1184250277 1.151354e-02
## [763,] 0.204487093 0.0179374643 5.244873e-04
## [764,] 0.362525595 0.3866939680 1.374912e-01
## [765,] 0.266544426 0.0339238361 1.439193e-03
## [766,] 0.430813836 0.2910904300 6.556091e-02
## [767,] 0.438655970 0.1794501695 2.447048e-02
## [768,] 0.362525595 0.3866939680 1.374912e-01
## [769,] 0.426168977 0.1482326877 1.718640e-02
## [770,] 0.426168977 0.1482326877 1.718640e-02
## [771,] 0.444358195 0.2275981001 3.885821e-02
## [772,] 0.443086838 0.2436977611 4.467792e-02
## [773,] 0.406028666 0.1184250277 1.151354e-02
## [774,] 0.163702964 0.4333313752 3.823512e-01
## [775,] 0.104238696 0.3928997013 4.936432e-01
## [776,] 0.444358195 0.2275981001 3.885821e-02
## [777,] 0.392899701 0.1042386963 9.218388e-03
## [778,] 0.195398778 0.4422182874 3.336033e-01
## [779,] 0.131453291 0.0066840657 1.132892e-04
## [780,] 0.321175019 0.4163379880 1.798991e-01
## [781,] 0.436239133 0.2755194522 5.800410e-02
## [782,] 0.306334128 0.4241549461 1.957638e-01
## [783,] 0.438655970 0.1794501695 2.447048e-02
## [784,] 0.211473264 0.4440938538 3.108657e-01
## [785,] 0.436239133 0.2755194522 5.800410e-02
## [786,] 0.440355309 0.2596967205 5.105149e-02
## [787,] 0.426168977 0.1482326877 1.718640e-02
## [788,] 0.169380014 0.0116813803 2.685375e-04
## [789,] 0.397531973 0.3493462791 1.023338e-01
## [790,] 0.227598100 0.4443581954 2.891855e-01
## [791,] 0.360146521 0.0776786613 5.584740e-03
## [792,] 0.406028666 0.1184250277 1.151354e-02
## [793,] 0.375000000 0.3750000000 1.250000e-01
## [794,] 0.417093250 0.1331148669 1.416116e-02
## [795,] 0.349346279 0.3975319727 1.507880e-01
## [796,] 0.442218287 0.1953987782 2.877966e-02
## [797,] 0.163702964 0.4333313752 3.823512e-01
## [798,] 0.443086838 0.2436977611 4.467792e-02
## [799,] 0.416337988 0.3211750193 8.258786e-02
## [800,] 0.133114867 0.4170932496 4.356307e-01
## [801,] 0.362525595 0.3866939680 1.374912e-01
## [802,] 0.386693968 0.3625255950 1.132892e-01
## [803,] 0.377630828 0.0906313987 7.250512e-03
## [804,] 0.442218287 0.1953987782 2.877966e-02
## [805,] 0.349346279 0.3975319727 1.507880e-01
## [806,] 0.291090430 0.4308138364 2.125348e-01
## [807,] 0.417093250 0.1331148669 1.416116e-02
## [808,] 0.426168977 0.1482326877 1.718640e-02
## [809,] 0.375000000 0.3750000000 1.250000e-01
## [810,] 0.179450170 0.4386559699 3.574234e-01
## [811,] 0.392899701 0.1042386963 9.218388e-03
## [812,] 0.430813836 0.2910904300 6.556091e-02
## [813,] 0.430813836 0.2910904300 6.556091e-02
## [814,] 0.386693968 0.3625255950 1.132892e-01
## [815,] 0.386693968 0.3625255950 1.132892e-01
## [816,] 0.360146521 0.0776786613 5.584740e-03
## [817,] 0.335537578 0.4074384881 1.649156e-01
## [818,] 0.443086838 0.2436977611 4.467792e-02
## [819,] 0.306334128 0.4241549461 1.957638e-01
## [820,] 0.444093854 0.2114732637 3.356718e-02
## [821,] 0.340371253 0.0654560102 4.195898e-03
## [822,] 0.417093250 0.1331148669 1.416116e-02
## [823,] 0.424154946 0.3063341278 7.374710e-02
## [824,] 0.440355309 0.2596967205 5.105149e-02
## [825,] 0.392899701 0.1042386963 9.218388e-03
## [826,] 0.236850055 0.0253767916 9.063140e-04
## [827,] 0.426168977 0.1482326877 1.718640e-02
## [828,] 0.340371253 0.0654560102 4.195898e-03
## [829,] 0.377630828 0.0906313987 7.250512e-03
## [830,] 0.416337988 0.3211750193 8.258786e-02
## [831,] 0.433331375 0.1637029640 2.061445e-02
## [832,] 0.397531973 0.3493462791 1.023338e-01
## [833,] 0.054038972 0.3182294988 6.246727e-01
## [834,] 0.444358195 0.2275981001 3.885821e-02
## [835,] 0.440355309 0.2596967205 5.105149e-02
## [836,] 0.090631399 0.0030210466 3.356718e-05
## [837,] 0.426168977 0.1482326877 1.718640e-02
## [838,] 0.293645732 0.0435030714 2.148300e-03
## [839,] 0.349346279 0.3975319727 1.507880e-01
## [840,] 0.266544426 0.0339238361 1.439193e-03
## [841,] 0.442218287 0.1953987782 2.877966e-02
## [842,] 0.291090430 0.4308138364 2.125348e-01
## [843,] 0.444358195 0.2275981001 3.885821e-02
## [844,] 0.407438488 0.3355375785 9.210835e-02
## [845,] 0.386693968 0.3625255950 1.132892e-01
## [846,] 0.306334128 0.4241549461 1.957638e-01
## [847,] 0.386693968 0.3625255950 1.132892e-01
## [848,] 0.397531973 0.3493462791 1.023338e-01
## [849,] 0.090631399 0.0030210466 3.356718e-05
## [850,] 0.442218287 0.1953987782 2.877966e-02
## [851,] 0.407438488 0.3355375785 9.210835e-02
## [852,] 0.306334128 0.4241549461 1.957638e-01
## [853,] 0.349346279 0.3975319727 1.507880e-01
## [854,] 0.406028666 0.1184250277 1.151354e-02
## [855,] 0.433331375 0.1637029640 2.061445e-02
## [856,] 0.179450170 0.4386559699 3.574234e-01
## [857,] 0.397531973 0.3493462791 1.023338e-01
## [858,] 0.340371253 0.0654560102 4.195898e-03
## [859,] 0.195398778 0.4422182874 3.336033e-01
## [860,] 0.293645732 0.0435030714 2.148300e-03
## [861,] 0.436239133 0.2755194522 5.800410e-02
## [862,] 0.392899701 0.1042386963 9.218388e-03
## [863,] 0.424154946 0.3063341278 7.374710e-02
## [864,] 0.407438488 0.3355375785 9.210835e-02
## [865,] 0.306334128 0.4241549461 1.957638e-01
## [866,] 0.443086838 0.2436977611 4.467792e-02
## [867,] 0.444093854 0.2114732637 3.356718e-02
## [868,] 0.430813836 0.2910904300 6.556091e-02
## [869,] 0.377630828 0.0906313987 7.250512e-03
## [870,] 0.243697761 0.4430868383 2.685375e-01
## [871,] 0.416337988 0.3211750193 8.258786e-02
## [872,] 0.397531973 0.3493462791 1.023338e-01
## [873,] 0.397531973 0.3493462791 1.023338e-01
## [874,] 0.227598100 0.4443581954 2.891855e-01
## [875,] 0.443086838 0.2436977611 4.467792e-02
## [876,] 0.436239133 0.2755194522 5.800410e-02
## [877,] 0.360146521 0.0776786613 5.584740e-03
## [878,] 0.243697761 0.4430868383 2.685375e-01
## [879,] 0.433331375 0.1637029640 2.061445e-02
## [880,] 0.386693968 0.3625255950 1.132892e-01
## [881,] 0.318229499 0.0540389715 3.058810e-03
## [882,] 0.443086838 0.2436977611 4.467792e-02
## [883,] 0.426168977 0.1482326877 1.718640e-02
## [884,] 0.090631399 0.0030210466 3.356718e-05
## [885,] 0.362525595 0.3866939680 1.374912e-01
## [886,] 0.436239133 0.2755194522 5.800410e-02
## [887,] 0.416337988 0.3211750193 8.258786e-02
## [888,] 0.227598100 0.4443581954 2.891855e-01
## [889,] 0.104238696 0.3928997013 4.936432e-01
## [890,] 0.293645732 0.0435030714 2.148300e-03
## [891,] 0.426168977 0.1482326877 1.718640e-02
## [892,] 0.424154946 0.3063341278 7.374710e-02
## [893,] 0.321175019 0.4163379880 1.798991e-01
## [894,] 0.306334128 0.4241549461 1.957638e-01
## [895,] 0.291090430 0.4308138364 2.125348e-01
## [896,] 0.377630828 0.0906313987 7.250512e-03
## [897,] 0.386693968 0.3625255950 1.132892e-01
## [898,] 0.386693968 0.3625255950 1.132892e-01
## [899,] 0.377630828 0.0906313987 7.250512e-03
## [900,] 0.266544426 0.0339238361 1.439193e-03
## [901,] 0.227598100 0.4443581954 2.891855e-01
## [902,] 0.444093854 0.2114732637 3.356718e-02
## [903,] 0.443086838 0.2436977611 4.467792e-02
## [904,] 0.438655970 0.1794501695 2.447048e-02
## [905,] 0.340371253 0.0654560102 4.195898e-03
## [906,] 0.426168977 0.1482326877 1.718640e-02
## [907,] 0.444358195 0.2275981001 3.885821e-02
## [908,] 0.340371253 0.0654560102 4.195898e-03
## [909,] 0.318229499 0.0540389715 3.058810e-03
## [910,] 0.426168977 0.1482326877 1.718640e-02
## [911,] 0.444093854 0.2114732637 3.356718e-02
## [912,] 0.349346279 0.3975319727 1.507880e-01
## [913,] 0.436239133 0.2755194522 5.800410e-02
## [914,] 0.406028666 0.1184250277 1.151354e-02
## [915,] 0.318229499 0.0540389715 3.058810e-03
## [916,] 0.349346279 0.3975319727 1.507880e-01
## [917,] 0.266544426 0.0339238361 1.439193e-03
## [918,] 0.211473264 0.4440938538 3.108657e-01
## [919,] 0.179450170 0.4386559699 3.574234e-01
## [920,] 0.321175019 0.4163379880 1.798991e-01
## [921,] 0.444358195 0.2275981001 3.885821e-02
## [922,] 0.204487093 0.0179374643 5.244873e-04
## [923,] 0.397531973 0.3493462791 1.023338e-01
## [924,] 0.406028666 0.1184250277 1.151354e-02
## [925,] 0.259696720 0.4403553087 2.488965e-01
## [926,] 0.243697761 0.4430868383 2.685375e-01
## [927,] 0.397531973 0.3493462791 1.023338e-01
## [928,] 0.440355309 0.2596967205 5.105149e-02
## [929,] 0.318229499 0.0540389715 3.058810e-03
## [930,] 0.046838810 0.0007678494 4.195898e-06
## [931,] 0.424154946 0.3063341278 7.374710e-02
## [932,] 0.406028666 0.1184250277 1.151354e-02
## [933,] 0.392899701 0.1042386963 9.218388e-03
## [934,] 0.362525595 0.3866939680 1.374912e-01
## [935,] 0.335537578 0.4074384881 1.649156e-01
## [936,] 0.417093250 0.1331148669 1.416116e-02
## [937,] 0.360146521 0.0776786613 5.584740e-03
## [938,] 0.426168977 0.1482326877 1.718640e-02
## [939,] 0.169380014 0.0116813803 2.685375e-04
## [940,] 0.436239133 0.2755194522 5.800410e-02
## [941,] 0.424154946 0.3063341278 7.374710e-02
## [942,] 0.416337988 0.3211750193 8.258786e-02
## [943,] 0.407438488 0.3355375785 9.210835e-02
## [944,] 0.227598100 0.4443581954 2.891855e-01
## [945,] 0.335537578 0.4074384881 1.649156e-01
## [946,] 0.416337988 0.3211750193 8.258786e-02
## [947,] 0.321175019 0.4163379880 1.798991e-01
## [948,] 0.340371253 0.0654560102 4.195898e-03
## [949,] 0.335537578 0.4074384881 1.649156e-01
## [950,] 0.440355309 0.2596967205 5.105149e-02
## [951,] 0.424154946 0.3063341278 7.374710e-02
## [952,] 0.386693968 0.3625255950 1.132892e-01
## [953,] 0.397531973 0.3493462791 1.023338e-01
## [954,] 0.392899701 0.1042386963 9.218388e-03
## [955,] 0.340371253 0.0654560102 4.195898e-03
## [956,] 0.416337988 0.3211750193 8.258786e-02
## [957,] 0.275519452 0.4362391326 2.302373e-01
## [958,] 0.397531973 0.3493462791 1.023338e-01
## [959,] 0.440355309 0.2596967205 5.105149e-02
## [960,] 0.375000000 0.3750000000 1.250000e-01
## [961,] 0.386693968 0.3625255950 1.132892e-01
## [962,] 0.259696720 0.4403553087 2.488965e-01
## [963,] 0.416337988 0.3211750193 8.258786e-02
## [964,] 0.335537578 0.4074384881 1.649156e-01
## [965,] 0.349346279 0.3975319727 1.507880e-01
## [966,] 0.407438488 0.3355375785 9.210835e-02
## [967,] 0.416337988 0.3211750193 8.258786e-02
## [968,] 0.443086838 0.2436977611 4.467792e-02
## [969,] 0.386693968 0.3625255950 1.132892e-01
## [970,] 0.397531973 0.3493462791 1.023338e-01
## [971,] 0.416337988 0.3211750193 8.258786e-02
## [972,] 0.375000000 0.3750000000 1.250000e-01
## [973,] 0.259696720 0.4403553087 2.488965e-01
## [974,] 0.006684066 0.1314532913 8.617494e-01
## [975,] 0.386693968 0.3625255950 1.132892e-01
## [976,] 0.275519452 0.4362391326 2.302373e-01
## [977,] 0.444358195 0.2275981001 3.885821e-02
## [978,] 0.424154946 0.3063341278 7.374710e-02
## [979,] 0.375000000 0.3750000000 1.250000e-01
## [980,] 0.243697761 0.4430868383 2.685375e-01
## [981,] 0.407438488 0.3355375785 9.210835e-02
## [982,] 0.293645732 0.0435030714 2.148300e-03
## [983,] 0.195398778 0.4422182874 3.336033e-01
## [984,] 0.179450170 0.4386559699 3.574234e-01
## [985,] 0.397531973 0.3493462791 1.023338e-01
## [986,] 0.443086838 0.2436977611 4.467792e-02
## [987,] 0.433331375 0.1637029640 2.061445e-02
## [988,] 0.195398778 0.4422182874 3.336033e-01
## [989,] 0.416337988 0.3211750193 8.258786e-02
## [990,] 0.318229499 0.0540389715 3.058810e-03
## [991,] 0.360146521 0.0776786613 5.584740e-03
## [992,] 0.362525595 0.3866939680 1.374912e-01
## [993,] 0.266544426 0.0339238361 1.439193e-03
## [994,] 0.440355309 0.2596967205 5.105149e-02
## [995,] 0.444093854 0.2114732637 3.356718e-02
## [996,] 0.438655970 0.1794501695 2.447048e-02
## [997,] 0.204487093 0.0179374643 5.244873e-04
## [998,] 0.340371253 0.0654560102 4.195898e-03
## [999,] 0.436239133 0.2755194522 5.800410e-02
## [1000,] 0.442218287 0.1953987782 2.877966e-02
## [1001,] 0.243697761 0.4430868383 2.685375e-01
## [1002,] 0.148232688 0.4261689772 4.084119e-01
## [1003,] 0.416337988 0.3211750193 8.258786e-02
## [1004,] 0.443086838 0.2436977611 4.467792e-02
## [1005,] 0.291090430 0.4308138364 2.125348e-01
## [1006,] 0.407438488 0.3355375785 9.210835e-02
## [1007,] 0.291090430 0.4308138364 2.125348e-01
## [1008,] 0.321175019 0.4163379880 1.798991e-01
## [1009,] 0.417093250 0.1331148669 1.416116e-02
## [1010,] 0.306334128 0.4241549461 1.957638e-01
## [1011,] 0.406028666 0.1184250277 1.151354e-02
## [1012,] 0.306334128 0.4241549461 1.957638e-01
## [1013,] 0.444093854 0.2114732637 3.356718e-02
## [1014,] 0.392899701 0.1042386963 9.218388e-03
## [1015,] 0.440355309 0.2596967205 5.105149e-02
## [1016,] 0.416337988 0.3211750193 8.258786e-02
## [1017,] 0.375000000 0.3750000000 1.250000e-01
## [1018,] 0.362525595 0.3866939680 1.374912e-01
## [1019,] 0.443086838 0.2436977611 4.467792e-02
## [1020,] 0.360146521 0.0776786613 5.584740e-03
## [1021,] 0.406028666 0.1184250277 1.151354e-02
## [1022,] 0.349346279 0.3975319727 1.507880e-01
## [1023,] 0.436239133 0.2755194522 5.800410e-02
## [1024,] 0.227598100 0.4443581954 2.891855e-01
## [1025,] 0.392899701 0.1042386963 9.218388e-03
## [1026,] 0.360146521 0.0776786613 5.584740e-03
## [1027,] 0.293645732 0.0435030714 2.148300e-03
## [1028,] 0.362525595 0.3866939680 1.374912e-01
## [1029,] 0.179450170 0.4386559699 3.574234e-01
## [1030,] 0.433331375 0.1637029640 2.061445e-02
## [1031,] 0.169380014 0.0116813803 2.685375e-04
## [1032,] 0.291090430 0.4308138364 2.125348e-01
## [1033,] 0.163702964 0.4333313752 3.823512e-01
## [1034,] 0.430813836 0.2910904300 6.556091e-02
## [1035,] 0.375000000 0.3750000000 1.250000e-01
## [1036,] 0.438655970 0.1794501695 2.447048e-02
## [1037,] 0.293645732 0.0435030714 2.148300e-03
## [1038,] 0.407438488 0.3355375785 9.210835e-02
## [1039,] 0.169380014 0.0116813803 2.685375e-04
## [1040,] 0.163702964 0.4333313752 3.823512e-01
## [1041,] 0.424154946 0.3063341278 7.374710e-02
## [1042,] 0.349346279 0.3975319727 1.507880e-01
## [1043,] 0.407438488 0.3355375785 9.210835e-02
## [1044,] 0.430813836 0.2910904300 6.556091e-02
## [1045,] 0.443086838 0.2436977611 4.467792e-02
## [1046,] 0.440355309 0.2596967205 5.105149e-02
## [1047,] 0.349346279 0.3975319727 1.507880e-01
## [1048,] 0.426168977 0.1482326877 1.718640e-02
## [1049,] 0.416337988 0.3211750193 8.258786e-02
## [1050,] 0.433331375 0.1637029640 2.061445e-02
## [1051,] 0.417093250 0.1331148669 1.416116e-02
## [1052,] 0.407438488 0.3355375785 9.210835e-02
## [1053,] 0.424154946 0.3063341278 7.374710e-02
## [1054,] 0.362525595 0.3866939680 1.374912e-01
## [1055,] 0.291090430 0.4308138364 2.125348e-01
## [1056,] 0.375000000 0.3750000000 1.250000e-01
## [1057,] 0.397531973 0.3493462791 1.023338e-01
## [1058,] 0.443086838 0.2436977611 4.467792e-02
## [1059,] 0.131453291 0.0066840657 1.132892e-04
## [1060,] 0.211473264 0.4440938538 3.108657e-01
## [1061,] 0.275519452 0.4362391326 2.302373e-01
## [1062,] 0.195398778 0.4422182874 3.336033e-01
## [1063,] 0.424154946 0.3063341278 7.374710e-02
## [1064,] 0.430813836 0.2910904300 6.556091e-02
## [1065,] 0.360146521 0.0776786613 5.584740e-03
## [1066,] 0.444093854 0.2114732637 3.356718e-02
## [1067,] 0.293645732 0.0435030714 2.148300e-03
## [1068,] 0.340371253 0.0654560102 4.195898e-03
## [1069,] 0.416337988 0.3211750193 8.258786e-02
## [1070,] 0.444358195 0.2275981001 3.885821e-02
## [1071,] 0.417093250 0.1331148669 1.416116e-02
## [1072,] 0.424154946 0.3063341278 7.374710e-02
## [1073,] 0.386693968 0.3625255950 1.132892e-01
## [1074,] 0.416337988 0.3211750193 8.258786e-02
## [1075,] 0.275519452 0.4362391326 2.302373e-01
## [1076,] 0.443086838 0.2436977611 4.467792e-02
## [1077,] 0.054038972 0.3182294988 6.246727e-01
## [1078,] 0.377630828 0.0906313987 7.250512e-03
## [1079,] 0.416337988 0.3211750193 8.258786e-02
## [1080,] 0.440355309 0.2596967205 5.105149e-02
## [1081,] 0.443086838 0.2436977611 4.467792e-02
## [1082,] 0.227598100 0.4443581954 2.891855e-01
## [1083,] 0.444093854 0.2114732637 3.356718e-02
## [1084,] 0.293645732 0.0435030714 2.148300e-03
## [1085,] 0.321175019 0.4163379880 1.798991e-01
## [1086,] 0.407438488 0.3355375785 9.210835e-02
## [1087,] 0.436239133 0.2755194522 5.800410e-02
## [1088,] 0.377630828 0.0906313987 7.250512e-03
## [1089,] 0.426168977 0.1482326877 1.718640e-02
## [1090,] 0.335537578 0.4074384881 1.649156e-01
## [1091,] 0.335537578 0.4074384881 1.649156e-01
## [1092,] 0.306334128 0.4241549461 1.957638e-01
## [1093,] 0.397531973 0.3493462791 1.023338e-01
## [1094,] 0.131453291 0.0066840657 1.132892e-04
## [1095,] 0.043503071 0.2936457319 6.607029e-01
## [1096,] 0.444093854 0.2114732637 3.356718e-02
## [1097,] 0.321175019 0.4163379880 1.798991e-01
## [1098,] 0.433331375 0.1637029640 2.061445e-02
## [1099,] 0.211473264 0.4440938538 3.108657e-01
## [1100,] 0.444358195 0.2275981001 3.885821e-02
## [1101,] 0.195398778 0.4422182874 3.336033e-01
## [1102,] 0.148232688 0.4261689772 4.084119e-01
## [1103,] 0.407438488 0.3355375785 9.210835e-02
## [1104,] 0.266544426 0.0339238361 1.439193e-03
## [1105,] 0.000000000 0.0000000000 1.000000e+00
## [1106,] 0.349346279 0.3975319727 1.507880e-01
## [1107,] 0.243697761 0.4430868383 2.685375e-01
## [1108,] 0.335537578 0.4074384881 1.649156e-01
## [1109,] 0.416337988 0.3211750193 8.258786e-02
## [1110,] 0.392899701 0.1042386963 9.218388e-03
## [1111,] 0.375000000 0.3750000000 1.250000e-01
## [1112,] 0.397531973 0.3493462791 1.023338e-01
## [1113,] 0.444358195 0.2275981001 3.885821e-02
## [1114,] 0.321175019 0.4163379880 1.798991e-01
## [1115,] 0.442218287 0.1953987782 2.877966e-02
## [1116,] 0.335537578 0.4074384881 1.649156e-01
## [1117,] 0.444358195 0.2275981001 3.885821e-02
## [1118,] 0.163702964 0.4333313752 3.823512e-01
## [1119,] 0.204487093 0.0179374643 5.244873e-04
## [1120,] 0.179450170 0.4386559699 3.574234e-01
## [1121,] 0.430813836 0.2910904300 6.556091e-02
## [1122,] 0.426168977 0.1482326877 1.718640e-02
## [1123,] 0.444093854 0.2114732637 3.356718e-02
## [1124,] 0.266544426 0.0339238361 1.439193e-03
## [1125,] 0.377630828 0.0906313987 7.250512e-03
## [1126,] 0.417093250 0.1331148669 1.416116e-02
## [1127,] 0.360146521 0.0776786613 5.584740e-03
## [1128,] 0.406028666 0.1184250277 1.151354e-02
## [1129,] 0.306334128 0.4241549461 1.957638e-01
## [1130,] 0.236850055 0.0253767916 9.063140e-04
## [1131,] 0.377630828 0.0906313987 7.250512e-03
## [1132,] 0.397531973 0.3493462791 1.023338e-01
## [1133,] 0.424154946 0.3063341278 7.374710e-02
## [1134,] 0.440355309 0.2596967205 5.105149e-02
## [1135,] 0.306334128 0.4241549461 1.957638e-01
## [1136,] 0.266544426 0.0339238361 1.439193e-03
## [1137,] 0.375000000 0.3750000000 1.250000e-01
## [1138,] 0.433331375 0.1637029640 2.061445e-02
## [1139,] 0.118425028 0.4060286664 4.640328e-01
## [1140,] 0.259696720 0.4403553087 2.488965e-01
## [1141,] 0.397531973 0.3493462791 1.023338e-01
## [1142,] 0.275519452 0.4362391326 2.302373e-01
## [1143,] 0.426168977 0.1482326877 1.718640e-02
## [1144,] 0.204487093 0.0179374643 5.244873e-04
## [1145,] 0.430813836 0.2910904300 6.556091e-02
## [1146,] 0.438655970 0.1794501695 2.447048e-02
## [1147,] 0.169380014 0.0116813803 2.685375e-04
## [1148,] 0.362525595 0.3866939680 1.374912e-01
## [1149,] 0.243697761 0.4430868383 2.685375e-01
## [1150,] 0.424154946 0.3063341278 7.374710e-02
## [1151,] 0.362525595 0.3866939680 1.374912e-01
## [1152,] 0.291090430 0.4308138364 2.125348e-01
## [1153,] 0.406028666 0.1184250277 1.151354e-02
## [1154,] 0.362525595 0.3866939680 1.374912e-01
## [1155,] 0.236850055 0.0253767916 9.063140e-04
## [1156,] 0.321175019 0.4163379880 1.798991e-01
## [1157,] 0.266544426 0.0339238361 1.439193e-03
## [1158,] 0.259696720 0.4403553087 2.488965e-01
## [1159,] 0.430813836 0.2910904300 6.556091e-02
## [1160,] 0.443086838 0.2436977611 4.467792e-02
## [1161,] 0.444358195 0.2275981001 3.885821e-02
## [1162,] 0.406028666 0.1184250277 1.151354e-02
## [1163,] 0.386693968 0.3625255950 1.132892e-01
## [1164,] 0.433331375 0.1637029640 2.061445e-02
## [1165,] 0.335537578 0.4074384881 1.649156e-01
## [1166,] 0.362525595 0.3866939680 1.374912e-01
## [1167,] 0.433331375 0.1637029640 2.061445e-02
## [1168,] 0.318229499 0.0540389715 3.058810e-03
## [1169,] 0.259696720 0.4403553087 2.488965e-01
## [1170,] 0.386693968 0.3625255950 1.132892e-01
## [1171,] 0.440355309 0.2596967205 5.105149e-02
## [1172,] 0.227598100 0.4443581954 2.891855e-01
## [1173,] 0.291090430 0.4308138364 2.125348e-01
## [1174,] 0.426168977 0.1482326877 1.718640e-02
## [1175,] 0.430813836 0.2910904300 6.556091e-02
## [1176,] 0.430813836 0.2910904300 6.556091e-02
## [1177,] 0.417093250 0.1331148669 1.416116e-02
## [1178,] 0.131453291 0.0066840657 1.132892e-04
## [1179,] 0.306334128 0.4241549461 1.957638e-01
## [1180,] 0.306334128 0.4241549461 1.957638e-01
## [1181,] 0.433331375 0.1637029640 2.061445e-02
## [1182,] 0.204487093 0.0179374643 5.244873e-04
## [1183,] 0.195398778 0.4422182874 3.336033e-01
## [1184,] 0.349346279 0.3975319727 1.507880e-01
## [1185,] 0.090631399 0.0030210466 3.356718e-05
## [1186,] 0.349346279 0.3975319727 1.507880e-01
## [1187,] 0.133114867 0.4170932496 4.356307e-01
## [1188,] 0.442218287 0.1953987782 2.877966e-02
## [1189,] 0.236850055 0.0253767916 9.063140e-04
## [1190,] 0.438655970 0.1794501695 2.447048e-02
## [1191,] 0.417093250 0.1331148669 1.416116e-02
## [1192,] 0.438655970 0.1794501695 2.447048e-02
## [1193,] 0.406028666 0.1184250277 1.151354e-02
## [1194,] 0.416337988 0.3211750193 8.258786e-02
## [1195,] 0.417093250 0.1331148669 1.416116e-02
## [1196,] 0.397531973 0.3493462791 1.023338e-01
## [1197,] 0.442218287 0.1953987782 2.877966e-02
## [1198,] 0.259696720 0.4403553087 2.488965e-01
## [1199,] 0.397531973 0.3493462791 1.023338e-01
## [1200,] 0.360146521 0.0776786613 5.584740e-03
## [1201,] 0.442218287 0.1953987782 2.877966e-02
## [1202,] 0.259696720 0.4403553087 2.488965e-01
## [1203,] 0.444358195 0.2275981001 3.885821e-02
## [1204,] 0.227598100 0.4443581954 2.891855e-01
## [1205,] 0.392899701 0.1042386963 9.218388e-03
## [1206,] 0.293645732 0.0435030714 2.148300e-03
## [1207,] 0.444093854 0.2114732637 3.356718e-02
## [1208,] 0.349346279 0.3975319727 1.507880e-01
## [1209,] 0.406028666 0.1184250277 1.151354e-02
## [1210,] 0.375000000 0.3750000000 1.250000e-01
## [1211,] 0.443086838 0.2436977611 4.467792e-02
## [1212,] 0.211473264 0.4440938538 3.108657e-01
## [1213,] 0.377630828 0.0906313987 7.250512e-03
## [1214,] 0.440355309 0.2596967205 5.105149e-02
## [1215,] 0.406028666 0.1184250277 1.151354e-02
## [1216,] 0.440355309 0.2596967205 5.105149e-02
## [1217,] 0.321175019 0.4163379880 1.798991e-01
## [1218,] 0.433331375 0.1637029640 2.061445e-02
## [1219,] 0.430813836 0.2910904300 6.556091e-02
## [1220,] 0.362525595 0.3866939680 1.374912e-01
## [1221,] 0.046838810 0.0007678494 4.195898e-06
## [1222,] 0.321175019 0.4163379880 1.798991e-01
## [1223,] 0.169380014 0.0116813803 2.685375e-04
## [1224,] 0.375000000 0.3750000000 1.250000e-01
## [1225,] 0.417093250 0.1331148669 1.416116e-02
## [1226,] 0.392899701 0.1042386963 9.218388e-03
## [1227,] 0.430813836 0.2910904300 6.556091e-02
## [1228,] 0.443086838 0.2436977611 4.467792e-02
## [1229,] 0.386693968 0.3625255950 1.132892e-01
## [1230,] 0.407438488 0.3355375785 9.210835e-02
## [1231,] 0.243697761 0.4430868383 2.685375e-01
## [1232,] 0.362525595 0.3866939680 1.374912e-01
## [1233,] 0.444093854 0.2114732637 3.356718e-02
## [1234,] 0.417093250 0.1331148669 1.416116e-02
## [1235,] 0.335537578 0.4074384881 1.649156e-01
## [1236,] 0.321175019 0.4163379880 1.798991e-01
## [1237,] 0.442218287 0.1953987782 2.877966e-02
## [1238,] 0.306334128 0.4241549461 1.957638e-01
## [1239,] 0.306334128 0.4241549461 1.957638e-01
## [1240,] 0.266544426 0.0339238361 1.439193e-03
## [1241,] 0.433331375 0.1637029640 2.061445e-02
## [1242,] 0.360146521 0.0776786613 5.584740e-03
## [1243,] 0.430813836 0.2910904300 6.556091e-02
## [1244,] 0.291090430 0.4308138364 2.125348e-01
## [1245,] 0.386693968 0.3625255950 1.132892e-01
## [1246,] 0.436239133 0.2755194522 5.800410e-02
## [1247,] 0.430813836 0.2910904300 6.556091e-02
## [1248,] 0.406028666 0.1184250277 1.151354e-02
## [1249,] 0.090631399 0.0030210466 3.356718e-05
## [1250,] 0.430813836 0.2910904300 6.556091e-02
## [1251,] 0.243697761 0.4430868383 2.685375e-01
## [1252,] 0.444093854 0.2114732637 3.356718e-02
## [1253,] 0.204487093 0.0179374643 5.244873e-04
## [1254,] 0.306334128 0.4241549461 1.957638e-01
## [1255,] 0.118425028 0.4060286664 4.640328e-01
## [1256,] 0.397531973 0.3493462791 1.023338e-01
## [1257,] 0.444358195 0.2275981001 3.885821e-02
## [1258,] 0.433331375 0.1637029640 2.061445e-02
## [1259,] 0.443086838 0.2436977611 4.467792e-02
## [1260,] 0.443086838 0.2436977611 4.467792e-02
## [1261,] 0.433331375 0.1637029640 2.061445e-02
## [1262,] 0.293645732 0.0435030714 2.148300e-03
## [1263,] 0.204487093 0.0179374643 5.244873e-04
## [1264,] 0.195398778 0.4422182874 3.336033e-01
## [1265,] 0.236850055 0.0253767916 9.063140e-04
## [1266,] 0.362525595 0.3866939680 1.374912e-01
## [1267,] 0.169380014 0.0116813803 2.685375e-04
## [1268,] 0.179450170 0.4386559699 3.574234e-01
## [1269,] 0.440355309 0.2596967205 5.105149e-02
## [1270,] 0.306334128 0.4241549461 1.957638e-01
## [1271,] 0.360146521 0.0776786613 5.584740e-03
## [1272,] 0.444358195 0.2275981001 3.885821e-02
## [1273,] 0.054038972 0.3182294988 6.246727e-01
## [1274,] 0.169380014 0.0116813803 2.685375e-04
## [1275,] 0.386693968 0.3625255950 1.132892e-01
## [1276,] 0.433331375 0.1637029640 2.061445e-02
## [1277,] 0.407438488 0.3355375785 9.210835e-02
## [1278,] 0.291090430 0.4308138364 2.125348e-01
## [1279,] 0.438655970 0.1794501695 2.447048e-02
## [1280,] 0.131453291 0.0066840657 1.132892e-04
## [1281,] 0.440355309 0.2596967205 5.105149e-02
## [1282,] 0.406028666 0.1184250277 1.151354e-02
## [1283,] 0.438655970 0.1794501695 2.447048e-02
## [1284,] 0.340371253 0.0654560102 4.195898e-03
## [1285,] 0.440355309 0.2596967205 5.105149e-02
## [1286,] 0.291090430 0.4308138364 2.125348e-01
## [1287,] 0.424154946 0.3063341278 7.374710e-02
## [1288,] 0.440355309 0.2596967205 5.105149e-02
## [1289,] 0.259696720 0.4403553087 2.488965e-01
## [1290,] 0.291090430 0.4308138364 2.125348e-01
## [1291,] 0.438655970 0.1794501695 2.447048e-02
## [1292,] 0.430813836 0.2910904300 6.556091e-02
## [1293,] 0.318229499 0.0540389715 3.058810e-03
## [1294,] 0.406028666 0.1184250277 1.151354e-02
## [1295,] 0.444093854 0.2114732637 3.356718e-02
## [1296,] 0.340371253 0.0654560102 4.195898e-03
## [1297,] 0.436239133 0.2755194522 5.800410e-02
## [1298,] 0.349346279 0.3975319727 1.507880e-01
## [1299,] 0.291090430 0.4308138364 2.125348e-01
## [1300,] 0.444358195 0.2275981001 3.885821e-02
## [1301,] 0.436239133 0.2755194522 5.800410e-02
## [1302,] 0.204487093 0.0179374643 5.244873e-04
## [1303,] 0.443086838 0.2436977611 4.467792e-02
## [1304,] 0.443086838 0.2436977611 4.467792e-02
## [1305,] 0.349346279 0.3975319727 1.507880e-01
## [1306,] 0.011681380 0.1693800141 8.186701e-01
## [1307,] 0.318229499 0.0540389715 3.058810e-03
## [1308,] 0.266544426 0.0339238361 1.439193e-03
## [1309,] 0.318229499 0.0540389715 3.058810e-03
## [1310,] 0.417093250 0.1331148669 1.416116e-02
## [1311,] 0.349346279 0.3975319727 1.507880e-01
## [1312,] 0.169380014 0.0116813803 2.685375e-04
## [1313,] 0.397531973 0.3493462791 1.023338e-01
## [1314,] 0.426168977 0.1482326877 1.718640e-02
## [1315,] 0.397531973 0.3493462791 1.023338e-01
## [1316,] 0.392899701 0.1042386963 9.218388e-03
## [1317,] 0.397531973 0.3493462791 1.023338e-01
## [1318,] 0.375000000 0.3750000000 1.250000e-01
## [1319,] 0.443086838 0.2436977611 4.467792e-02
## [1320,] 0.349346279 0.3975319727 1.507880e-01
## [1321,] 0.392899701 0.1042386963 9.218388e-03
## [1322,] 0.386693968 0.3625255950 1.132892e-01
## [1323,] 0.275519452 0.4362391326 2.302373e-01
## [1324,] 0.407438488 0.3355375785 9.210835e-02
## [1325,] 0.321175019 0.4163379880 1.798991e-01
## [1326,] 0.406028666 0.1184250277 1.151354e-02
## [1327,] 0.291090430 0.4308138364 2.125348e-01
## [1328,] 0.433331375 0.1637029640 2.061445e-02
## [1329,] 0.417093250 0.1331148669 1.416116e-02
## [1330,] 0.417093250 0.1331148669 1.416116e-02
## [1331,] 0.440355309 0.2596967205 5.105149e-02
## [1332,] 0.436239133 0.2755194522 5.800410e-02
## [1333,] 0.243697761 0.4430868383 2.685375e-01
## [1334,] 0.416337988 0.3211750193 8.258786e-02
## [1335,] 0.397531973 0.3493462791 1.023338e-01
## [1336,] 0.426168977 0.1482326877 1.718640e-02
## [1337,] 0.430813836 0.2910904300 6.556091e-02
## [1338,] 0.243697761 0.4430868383 2.685375e-01
## [1339,] 0.424154946 0.3063341278 7.374710e-02
## [1340,] 0.438655970 0.1794501695 2.447048e-02
## [1341,] 0.397531973 0.3493462791 1.023338e-01
## [1342,] 0.275519452 0.4362391326 2.302373e-01
## [1343,] 0.444093854 0.2114732637 3.356718e-02
## [1344,] 0.424154946 0.3063341278 7.374710e-02
## [1345,] 0.275519452 0.4362391326 2.302373e-01
## [1346,] 0.349346279 0.3975319727 1.507880e-01
## [1347,] 0.440355309 0.2596967205 5.105149e-02
## [1348,] 0.335537578 0.4074384881 1.649156e-01
## [1349,] 0.318229499 0.0540389715 3.058810e-03
## [1350,] 0.335537578 0.4074384881 1.649156e-01
## [1351,] 0.349346279 0.3975319727 1.507880e-01
## [1352,] 0.349346279 0.3975319727 1.507880e-01
## [1353,] 0.340371253 0.0654560102 4.195898e-03
## [1354,] 0.375000000 0.3750000000 1.250000e-01
## [1355,] 0.195398778 0.4422182874 3.336033e-01
## [1356,] 0.204487093 0.0179374643 5.244873e-04
## [1357,] 0.321175019 0.4163379880 1.798991e-01
## [1358,] 0.291090430 0.4308138364 2.125348e-01
## [1359,] 0.386693968 0.3625255950 1.132892e-01
## [1360,] 0.362525595 0.3866939680 1.374912e-01
## [1361,] 0.375000000 0.3750000000 1.250000e-01
## [1362,] 0.375000000 0.3750000000 1.250000e-01
## [1363,] 0.430813836 0.2910904300 6.556091e-02
## [1364,] 0.407438488 0.3355375785 9.210835e-02
## [1365,] 0.386693968 0.3625255950 1.132892e-01
## [1366,] 0.046838810 0.0007678494 4.195898e-06
## [1367,] 0.275519452 0.4362391326 2.302373e-01
## [1368,] 0.424154946 0.3063341278 7.374710e-02
## [1369,] 0.436239133 0.2755194522 5.800410e-02
## [1370,] 0.406028666 0.1184250277 1.151354e-02
## [1371,] 0.406028666 0.1184250277 1.151354e-02
## [1372,] 0.430813836 0.2910904300 6.556091e-02
## [1373,] 0.259696720 0.4403553087 2.488965e-01
## [1374,] 0.104238696 0.3928997013 4.936432e-01
## [1375,] 0.392899701 0.1042386963 9.218388e-03
## [1376,] 0.375000000 0.3750000000 1.250000e-01
## [1377,] 0.440355309 0.2596967205 5.105149e-02
## [1378,] 0.433331375 0.1637029640 2.061445e-02
## [1379,] 0.417093250 0.1331148669 1.416116e-02
## [1380,] 0.321175019 0.4163379880 1.798991e-01
## [1381,] 0.430813836 0.2910904300 6.556091e-02
## [1382,] 0.438655970 0.1794501695 2.447048e-02
## [1383,] 0.444093854 0.2114732637 3.356718e-02
## [1384,] 0.243697761 0.4430868383 2.685375e-01
## [1385,] 0.416337988 0.3211750193 8.258786e-02
## [1386,] 0.426168977 0.1482326877 1.718640e-02
## [1387,] 0.131453291 0.0066840657 1.132892e-04
## [1388,] 0.444358195 0.2275981001 3.885821e-02
## [1389,] 0.340371253 0.0654560102 4.195898e-03
## [1390,] 0.306334128 0.4241549461 1.957638e-01
## [1391,] 0.236850055 0.0253767916 9.063140e-04
## [1392,] 0.392899701 0.1042386963 9.218388e-03
## [1393,] 0.424154946 0.3063341278 7.374710e-02
## [1394,] 0.377630828 0.0906313987 7.250512e-03
## [1395,] 0.440355309 0.2596967205 5.105149e-02
## [1396,] 0.293645732 0.0435030714 2.148300e-03
## [1397,] 0.406028666 0.1184250277 1.151354e-02
## [1398,] 0.436239133 0.2755194522 5.800410e-02
## [1399,] 0.424154946 0.3063341278 7.374710e-02
## [1400,] 0.377630828 0.0906313987 7.250512e-03
## [1401,] 0.243697761 0.4430868383 2.685375e-01
## [1402,] 0.417093250 0.1331148669 1.416116e-02
## [1403,] 0.340371253 0.0654560102 4.195898e-03
## [1404,] 0.430813836 0.2910904300 6.556091e-02
## [1405,] 0.375000000 0.3750000000 1.250000e-01
## [1406,] 0.438655970 0.1794501695 2.447048e-02
## [1407,] 0.397531973 0.3493462791 1.023338e-01
## [1408,] 0.426168977 0.1482326877 1.718640e-02
## [1409,] 0.179450170 0.4386559699 3.574234e-01
## [1410,] 0.424154946 0.3063341278 7.374710e-02
## [1411,] 0.386693968 0.3625255950 1.132892e-01
## [1412,] 0.275519452 0.4362391326 2.302373e-01
## [1413,] 0.362525595 0.3866939680 1.374912e-01
## [1414,] 0.377630828 0.0906313987 7.250512e-03
## [1415,] 0.426168977 0.1482326877 1.718640e-02
## [1416,] 0.349346279 0.3975319727 1.507880e-01
## [1417,] 0.321175019 0.4163379880 1.798991e-01
## [1418,] 0.443086838 0.2436977611 4.467792e-02
## [1419,] 0.426168977 0.1482326877 1.718640e-02
## [1420,] 0.438655970 0.1794501695 2.447048e-02
## [1421,] 0.306334128 0.4241549461 1.957638e-01
## [1422,] 0.179450170 0.4386559699 3.574234e-01
## [1423,] 0.417093250 0.1331148669 1.416116e-02
## [1424,] 0.424154946 0.3063341278 7.374710e-02
## [1425,] 0.000000000 0.0000000000 1.000000e+00
## [1426,] 0.349346279 0.3975319727 1.507880e-01
## [1427,] 0.211473264 0.4440938538 3.108657e-01
## [1428,] 0.417093250 0.1331148669 1.416116e-02
## [1429,] 0.340371253 0.0654560102 4.195898e-03
## [1430,] 0.275519452 0.4362391326 2.302373e-01
## [1431,] 0.275519452 0.4362391326 2.302373e-01
## [1432,] 0.426168977 0.1482326877 1.718640e-02
## [1433,] 0.416337988 0.3211750193 8.258786e-02
## [1434,] 0.275519452 0.4362391326 2.302373e-01
## [1435,] 0.340371253 0.0654560102 4.195898e-03
## [1436,] 0.442218287 0.1953987782 2.877966e-02
## [1437,] 0.275519452 0.4362391326 2.302373e-01
## [1438,] 0.169380014 0.0116813803 2.685375e-04
## [1439,] 0.211473264 0.4440938538 3.108657e-01
## [1440,] 0.377630828 0.0906313987 7.250512e-03
## [1441,] 0.362525595 0.3866939680 1.374912e-01
## [1442,] 0.444093854 0.2114732637 3.356718e-02
## [1443,] 0.291090430 0.4308138364 2.125348e-01
## [1444,] 0.444358195 0.2275981001 3.885821e-02
## [1445,] 0.436239133 0.2755194522 5.800410e-02
## [1446,] 0.054038972 0.3182294988 6.246727e-01
## [1447,] 0.375000000 0.3750000000 1.250000e-01
## [1448,] 0.416337988 0.3211750193 8.258786e-02
## [1449,] 0.440355309 0.2596967205 5.105149e-02
## [1450,] 0.417093250 0.1331148669 1.416116e-02
## [1451,] 0.397531973 0.3493462791 1.023338e-01
## [1452,] 0.204487093 0.0179374643 5.244873e-04
## [1453,] 0.406028666 0.1184250277 1.151354e-02
## [1454,] 0.377630828 0.0906313987 7.250512e-03
## [1455,] 0.306334128 0.4241549461 1.957638e-01
## [1456,] 0.335537578 0.4074384881 1.649156e-01
## [1457,] 0.377630828 0.0906313987 7.250512e-03
## [1458,] 0.406028666 0.1184250277 1.151354e-02
## [1459,] 0.321175019 0.4163379880 1.798991e-01
## [1460,] 0.392899701 0.1042386963 9.218388e-03
## [1461,] 0.362525595 0.3866939680 1.374912e-01
## [1462,] 0.440355309 0.2596967205 5.105149e-02
## [1463,] 0.397531973 0.3493462791 1.023338e-01
## [1464,] 0.442218287 0.1953987782 2.877966e-02
## [1465,] 0.236850055 0.0253767916 9.063140e-04
## [1466,] 0.321175019 0.4163379880 1.798991e-01
## [1467,] 0.444358195 0.2275981001 3.885821e-02
## [1468,] 0.397531973 0.3493462791 1.023338e-01
## [1469,] 0.438655970 0.1794501695 2.447048e-02
## [1470,] 0.211473264 0.4440938538 3.108657e-01
## [1471,] 0.430813836 0.2910904300 6.556091e-02
## [1472,] 0.090631399 0.0030210466 3.356718e-05
## [1473,] 0.318229499 0.0540389715 3.058810e-03
## [1474,] 0.362525595 0.3866939680 1.374912e-01
## [1475,] 0.275519452 0.4362391326 2.302373e-01
## [1476,] 0.046838810 0.0007678494 4.195898e-06
## [1477,] 0.433331375 0.1637029640 2.061445e-02
## [1478,] 0.416337988 0.3211750193 8.258786e-02
## [1479,] 0.306334128 0.4241549461 1.957638e-01
## [1480,] 0.436239133 0.2755194522 5.800410e-02
## [1481,] 0.349346279 0.3975319727 1.507880e-01
## [1482,] 0.386693968 0.3625255950 1.132892e-01
## [1483,] 0.362525595 0.3866939680 1.374912e-01
## [1484,] 0.442218287 0.1953987782 2.877966e-02
## [1485,] 0.444093854 0.2114732637 3.356718e-02
## [1486,] 0.440355309 0.2596967205 5.105149e-02
## [1487,] 0.349346279 0.3975319727 1.507880e-01
## [1488,] 0.349346279 0.3975319727 1.507880e-01
## [1489,] 0.430813836 0.2910904300 6.556091e-02
## [1490,] 0.426168977 0.1482326877 1.718640e-02
## [1491,] 0.430813836 0.2910904300 6.556091e-02
## [1492,] 0.227598100 0.4443581954 2.891855e-01
## [1493,] 0.195398778 0.4422182874 3.336033e-01
## [1494,] 0.375000000 0.3750000000 1.250000e-01
## [1495,] 0.306334128 0.4241549461 1.957638e-01
## [1496,] 0.440355309 0.2596967205 5.105149e-02
## [1497,] 0.360146521 0.0776786613 5.584740e-03
## [1498,] 0.118425028 0.4060286664 4.640328e-01
## [1499,] 0.426168977 0.1482326877 1.718640e-02
## [1500,] 0.440355309 0.2596967205 5.105149e-02
## [1501,] 0.293645732 0.0435030714 2.148300e-03
## [1502,] 0.306334128 0.4241549461 1.957638e-01
## [1503,] 0.424154946 0.3063341278 7.374710e-02
## [1504,] 0.321175019 0.4163379880 1.798991e-01
## [1505,] 0.306334128 0.4241549461 1.957638e-01
## [1506,] 0.179450170 0.4386559699 3.574234e-01
## [1507,] 0.443086838 0.2436977611 4.467792e-02
## [1508,] 0.444358195 0.2275981001 3.885821e-02
## [1509,] 0.291090430 0.4308138364 2.125348e-01
## [1510,] 0.259696720 0.4403553087 2.488965e-01
## [1511,] 0.416337988 0.3211750193 8.258786e-02
## [1512,] 0.340371253 0.0654560102 4.195898e-03
## [1513,] 0.243697761 0.4430868383 2.685375e-01
## [1514,] 0.335537578 0.4074384881 1.649156e-01
## [1515,] 0.392899701 0.1042386963 9.218388e-03
## [1516,] 0.163702964 0.4333313752 3.823512e-01
## [1517,] 0.436239133 0.2755194522 5.800410e-02
## [1518,] 0.377630828 0.0906313987 7.250512e-03
## [1519,] 0.335537578 0.4074384881 1.649156e-01
## [1520,] 0.436239133 0.2755194522 5.800410e-02
## [1521,] 0.259696720 0.4403553087 2.488965e-01
## [1522,] 0.407438488 0.3355375785 9.210835e-02
## [1523,] 0.131453291 0.0066840657 1.132892e-04
## [1524,] 0.426168977 0.1482326877 1.718640e-02
## [1525,] 0.444358195 0.2275981001 3.885821e-02
## [1526,] 0.436239133 0.2755194522 5.800410e-02
## [1527,] 0.000000000 0.0000000000 1.000000e+00
## [1528,] 0.392899701 0.1042386963 9.218388e-03
## [1529,] 0.440355309 0.2596967205 5.105149e-02
## [1530,] 0.442218287 0.1953987782 2.877966e-02
## [1531,] 0.430813836 0.2910904300 6.556091e-02
## [1532,] 0.306334128 0.4241549461 1.957638e-01
## [1533,] 0.416337988 0.3211750193 8.258786e-02
## [1534,] 0.227598100 0.4443581954 2.891855e-01
## [1535,] 0.360146521 0.0776786613 5.584740e-03
## [1536,] 0.360146521 0.0776786613 5.584740e-03
## [1537,] 0.416337988 0.3211750193 8.258786e-02
## [1538,] 0.163702964 0.4333313752 3.823512e-01
## [1539,] 0.275519452 0.4362391326 2.302373e-01
## [1540,] 0.444358195 0.2275981001 3.885821e-02
## [1541,] 0.436239133 0.2755194522 5.800410e-02
## [1542,] 0.397531973 0.3493462791 1.023338e-01
## [1543,] 0.430813836 0.2910904300 6.556091e-02
## [1544,] 0.436239133 0.2755194522 5.800410e-02
## [1545,] 0.362525595 0.3866939680 1.374912e-01
## [1546,] 0.444358195 0.2275981001 3.885821e-02
## [1547,] 0.362525595 0.3866939680 1.374912e-01
## [1548,] 0.211473264 0.4440938538 3.108657e-01
## [1549,] 0.259696720 0.4403553087 2.488965e-01
## [1550,] 0.375000000 0.3750000000 1.250000e-01
## [1551,] 0.417093250 0.1331148669 1.416116e-02
## [1552,] 0.227598100 0.4443581954 2.891855e-01
## [1553,] 0.440355309 0.2596967205 5.105149e-02
## [1554,] 0.417093250 0.1331148669 1.416116e-02
## [1555,] 0.340371253 0.0654560102 4.195898e-03
## [1556,] 0.375000000 0.3750000000 1.250000e-01
## [1557,] 0.349346279 0.3975319727 1.507880e-01
## [1558,] 0.169380014 0.0116813803 2.685375e-04
## [1559,] 0.397531973 0.3493462791 1.023338e-01
## [1560,] 0.227598100 0.4443581954 2.891855e-01
## [1561,] 0.440355309 0.2596967205 5.105149e-02
## [1562,] 0.406028666 0.1184250277 1.151354e-02
## [1563,] 0.444358195 0.2275981001 3.885821e-02
## [1564,] 0.148232688 0.4261689772 4.084119e-01
## [1565,] 0.438655970 0.1794501695 2.447048e-02
## [1566,] 0.195398778 0.4422182874 3.336033e-01
## [1567,] 0.426168977 0.1482326877 1.718640e-02
## [1568,] 0.335537578 0.4074384881 1.649156e-01
## [1569,] 0.417093250 0.1331148669 1.416116e-02
## [1570,] 0.426168977 0.1482326877 1.718640e-02
## [1571,] 0.444358195 0.2275981001 3.885821e-02
## [1572,] 0.227598100 0.4443581954 2.891855e-01
## [1573,] 0.375000000 0.3750000000 1.250000e-01
## [1574,] 0.443086838 0.2436977611 4.467792e-02
## [1575,] 0.375000000 0.3750000000 1.250000e-01
## [1576,] 0.227598100 0.4443581954 2.891855e-01
## [1577,] 0.444358195 0.2275981001 3.885821e-02
## [1578,] 0.163702964 0.4333313752 3.823512e-01
## [1579,] 0.266544426 0.0339238361 1.439193e-03
## [1580,] 0.321175019 0.4163379880 1.798991e-01
## [1581,] 0.204487093 0.0179374643 5.244873e-04
## [1582,] 0.438655970 0.1794501695 2.447048e-02
## [1583,] 0.046838810 0.0007678494 4.195898e-06
## [1584,] 0.430813836 0.2910904300 6.556091e-02
## [1585,] 0.443086838 0.2436977611 4.467792e-02
## [1586,] 0.444093854 0.2114732637 3.356718e-02
## [1587,] 0.163702964 0.4333313752 3.823512e-01
## [1588,] 0.416337988 0.3211750193 8.258786e-02
## [1589,] 0.406028666 0.1184250277 1.151354e-02
## [1590,] 0.442218287 0.1953987782 2.877966e-02
## [1591,] 0.442218287 0.1953987782 2.877966e-02
## [1592,] 0.416337988 0.3211750193 8.258786e-02
## [1593,] 0.424154946 0.3063341278 7.374710e-02
## [1594,] 0.444358195 0.2275981001 3.885821e-02
## [1595,] 0.417093250 0.1331148669 1.416116e-02
## [1596,] 0.433331375 0.1637029640 2.061445e-02
## [1597,] 0.163702964 0.4333313752 3.823512e-01
## [1598,] 0.416337988 0.3211750193 8.258786e-02
## [1599,] 0.440355309 0.2596967205 5.105149e-02
## [1600,] 0.416337988 0.3211750193 8.258786e-02
## [1601,] 0.433331375 0.1637029640 2.061445e-02
## [1602,] 0.335537578 0.4074384881 1.649156e-01
## [1603,] 0.443086838 0.2436977611 4.467792e-02
## [1604,] 0.440355309 0.2596967205 5.105149e-02
## [1605,] 0.386693968 0.3625255950 1.132892e-01
## [1606,] 0.291090430 0.4308138364 2.125348e-01
## [1607,] 0.148232688 0.4261689772 4.084119e-01
## [1608,] 0.360146521 0.0776786613 5.584740e-03
## [1609,] 0.440355309 0.2596967205 5.105149e-02
## [1610,] 0.243697761 0.4430868383 2.685375e-01
## [1611,] 0.426168977 0.1482326877 1.718640e-02
## [1612,] 0.430813836 0.2910904300 6.556091e-02
## [1613,] 0.407438488 0.3355375785 9.210835e-02
## [1614,] 0.397531973 0.3493462791 1.023338e-01
## [1615,] 0.416337988 0.3211750193 8.258786e-02
## [1616,] 0.426168977 0.1482326877 1.718640e-02
## [1617,] 0.406028666 0.1184250277 1.151354e-02
## [1618,] 0.291090430 0.4308138364 2.125348e-01
## [1619,] 0.169380014 0.0116813803 2.685375e-04
## [1620,] 0.426168977 0.1482326877 1.718640e-02
## [1621,] 0.386693968 0.3625255950 1.132892e-01
## [1622,] 0.375000000 0.3750000000 1.250000e-01
## [1623,] 0.397531973 0.3493462791 1.023338e-01
## [1624,] 0.433331375 0.1637029640 2.061445e-02
## [1625,] 0.362525595 0.3866939680 1.374912e-01
## [1626,] 0.291090430 0.4308138364 2.125348e-01
## [1627,] 0.416337988 0.3211750193 8.258786e-02
## [1628,] 0.443086838 0.2436977611 4.467792e-02
## [1629,] 0.397531973 0.3493462791 1.023338e-01
## [1630,] 0.436239133 0.2755194522 5.800410e-02
## [1631,] 0.386693968 0.3625255950 1.132892e-01
## [1632,] 0.375000000 0.3750000000 1.250000e-01
## [1633,] 0.349346279 0.3975319727 1.507880e-01
## [1634,] 0.243697761 0.4430868383 2.685375e-01
## [1635,] 0.406028666 0.1184250277 1.151354e-02
## [1636,] 0.291090430 0.4308138364 2.125348e-01
## [1637,] 0.266544426 0.0339238361 1.439193e-03
## [1638,] 0.033923836 0.2665444262 6.980925e-01
## [1639,] 0.000000000 0.0000000000 0.000000e+00
## [1640,] 0.335537578 0.4074384881 1.649156e-01
## [1641,] 0.349346279 0.3975319727 1.507880e-01
## [1642,] 0.424154946 0.3063341278 7.374710e-02
## [1643,] 0.360146521 0.0776786613 5.584740e-03
## [1644,] 0.386693968 0.3625255950 1.132892e-01
## [1645,] 0.179450170 0.4386559699 3.574234e-01
## [1646,] 0.236850055 0.0253767916 9.063140e-04
## [1647,] 0.386693968 0.3625255950 1.132892e-01
## [1648,] 0.306334128 0.4241549461 1.957638e-01
## [1649,] 0.386693968 0.3625255950 1.132892e-01
## [1650,] 0.033923836 0.2665444262 6.980925e-01
## [1651,] 0.377630828 0.0906313987 7.250512e-03
## [1652,] 0.386693968 0.3625255950 1.132892e-01
## [1653,] 0.360146521 0.0776786613 5.584740e-03
## [1654,] 0.443086838 0.2436977611 4.467792e-02
## [1655,] 0.335537578 0.4074384881 1.649156e-01
## [1656,] 0.407438488 0.3355375785 9.210835e-02
## [1657,] 0.424154946 0.3063341278 7.374710e-02
## [1658,] 0.443086838 0.2436977611 4.467792e-02
## [1659,] 0.392899701 0.1042386963 9.218388e-03
## [1660,] 0.046838810 0.0007678494 4.195898e-06
## [1661,] 0.430813836 0.2910904300 6.556091e-02
## [1662,] 0.275519452 0.4362391326 2.302373e-01
## [1663,] 0.291090430 0.4308138364 2.125348e-01
## [1664,] 0.436239133 0.2755194522 5.800410e-02
## [1665,] 0.318229499 0.0540389715 3.058810e-03
## [1666,] 0.426168977 0.1482326877 1.718640e-02
## [1667,] 0.397531973 0.3493462791 1.023338e-01
## [1668,] 0.417093250 0.1331148669 1.416116e-02
## [1669,] 0.433331375 0.1637029640 2.061445e-02
## [1670,] 0.443086838 0.2436977611 4.467792e-02
## [1671,] 0.397531973 0.3493462791 1.023338e-01
## [1672,] 0.416337988 0.3211750193 8.258786e-02
## [1673,] 0.306334128 0.4241549461 1.957638e-01
## [1674,] 0.440355309 0.2596967205 5.105149e-02
## [1675,] 0.407438488 0.3355375785 9.210835e-02
## [1676,] 0.424154946 0.3063341278 7.374710e-02
## [1677,] 0.424154946 0.3063341278 7.374710e-02
## [1678,] 0.407438488 0.3355375785 9.210835e-02
## [1679,] 0.444093854 0.2114732637 3.356718e-02
## [1680,] 0.417093250 0.1331148669 1.416116e-02
## [1681,] 0.335537578 0.4074384881 1.649156e-01
## [1682,] 0.417093250 0.1331148669 1.416116e-02
## [1683,] 0.406028666 0.1184250277 1.151354e-02
## [1684,] 0.444358195 0.2275981001 3.885821e-02
## [1685,] 0.438655970 0.1794501695 2.447048e-02
## [1686,] 0.442218287 0.1953987782 2.877966e-02
## [1687,] 0.443086838 0.2436977611 4.467792e-02
## [1688,] 0.275519452 0.4362391326 2.302373e-01
## [1689,] 0.375000000 0.3750000000 1.250000e-01
## [1690,] 0.406028666 0.1184250277 1.151354e-02
## [1691,] 0.386693968 0.3625255950 1.132892e-01
## [1692,] 0.386693968 0.3625255950 1.132892e-01
## [1693,] 0.406028666 0.1184250277 1.151354e-02
## [1694,] 0.377630828 0.0906313987 7.250512e-03
## [1695,] 0.417093250 0.1331148669 1.416116e-02
## [1696,] 0.275519452 0.4362391326 2.302373e-01
## [1697,] 0.407438488 0.3355375785 9.210835e-02
## [1698,] 0.375000000 0.3750000000 1.250000e-01
## [1699,] 0.442218287 0.1953987782 2.877966e-02
## [1700,] 0.321175019 0.4163379880 1.798991e-01
## [1701,] 0.275519452 0.4362391326 2.302373e-01
## [1702,] 0.275519452 0.4362391326 2.302373e-01
## [1703,] 0.386693968 0.3625255950 1.132892e-01
## [1704,] 0.397531973 0.3493462791 1.023338e-01
## [1705,] 0.335537578 0.4074384881 1.649156e-01
## [1706,] 0.443086838 0.2436977611 4.467792e-02
## [1707,] 0.433331375 0.1637029640 2.061445e-02
## [1708,] 0.443086838 0.2436977611 4.467792e-02
## [1709,] 0.169380014 0.0116813803 2.685375e-04
## [1710,] 0.386693968 0.3625255950 1.132892e-01
## [1711,] 0.443086838 0.2436977611 4.467792e-02
## [1712,] 0.416337988 0.3211750193 8.258786e-02
## [1713,] 0.377630828 0.0906313987 7.250512e-03
## [1714,] 0.407438488 0.3355375785 9.210835e-02
## [1715,] 0.406028666 0.1184250277 1.151354e-02
## [1716,] 0.321175019 0.4163379880 1.798991e-01
## [1717,] 0.406028666 0.1184250277 1.151354e-02
## [1718,] 0.444358195 0.2275981001 3.885821e-02
## [1719,] 0.349346279 0.3975319727 1.507880e-01
## [1720,] 0.443086838 0.2436977611 4.467792e-02
## [1721,] 0.118425028 0.4060286664 4.640328e-01
## [1722,] 0.443086838 0.2436977611 4.467792e-02
## [1723,] 0.335537578 0.4074384881 1.649156e-01
## [1724,] 0.406028666 0.1184250277 1.151354e-02
## [1725,] 0.416337988 0.3211750193 8.258786e-02
## [1726,] 0.442218287 0.1953987782 2.877966e-02
## [1727,] 0.375000000 0.3750000000 1.250000e-01
## [1728,] 0.321175019 0.4163379880 1.798991e-01
## [1729,] 0.118425028 0.4060286664 4.640328e-01
## [1730,] 0.440355309 0.2596967205 5.105149e-02
## [1731,] 0.306334128 0.4241549461 1.957638e-01
## [1732,] 0.236850055 0.0253767916 9.063140e-04
## [1733,] 0.179450170 0.4386559699 3.574234e-01
## [1734,] 0.163702964 0.4333313752 3.823512e-01
## [1735,] 0.293645732 0.0435030714 2.148300e-03
## [1736,] 0.416337988 0.3211750193 8.258786e-02
## [1737,] 0.204487093 0.0179374643 5.244873e-04
## [1738,] 0.392899701 0.1042386963 9.218388e-03
## [1739,] 0.430813836 0.2910904300 6.556091e-02
## [1740,] 0.386693968 0.3625255950 1.132892e-01
## [1741,] 0.291090430 0.4308138364 2.125348e-01
## [1742,] 0.386693968 0.3625255950 1.132892e-01
## [1743,] 0.163702964 0.4333313752 3.823512e-01
## [1744,] 0.259696720 0.4403553087 2.488965e-01
## [1745,] 0.077678661 0.3601465208 5.565901e-01
## [1746,] 0.392899701 0.1042386963 9.218388e-03
## [1747,] 0.444093854 0.2114732637 3.356718e-02
## [1748,] 0.424154946 0.3063341278 7.374710e-02
## [1749,] 0.392899701 0.1042386963 9.218388e-03
## [1750,] 0.375000000 0.3750000000 1.250000e-01
## [1751,] 0.293645732 0.0435030714 2.148300e-03
## [1752,] 0.377630828 0.0906313987 7.250512e-03
## [1753,] 0.443086838 0.2436977611 4.467792e-02
## [1754,] 0.424154946 0.3063341278 7.374710e-02
## [1755,] 0.133114867 0.4170932496 4.356307e-01
## [1756,] 0.306334128 0.4241549461 1.957638e-01
## [1757,] 0.275519452 0.4362391326 2.302373e-01
## [1758,] 0.442218287 0.1953987782 2.877966e-02
## [1759,] 0.407438488 0.3355375785 9.210835e-02
## [1760,] 0.442218287 0.1953987782 2.877966e-02
## [1761,] 0.243697761 0.4430868383 2.685375e-01
## [1762,] 0.349346279 0.3975319727 1.507880e-01
## [1763,] 0.436239133 0.2755194522 5.800410e-02
## [1764,] 0.407438488 0.3355375785 9.210835e-02
## [1765,] 0.430813836 0.2910904300 6.556091e-02
## [1766,] 0.397531973 0.3493462791 1.023338e-01
## [1767,] 0.424154946 0.3063341278 7.374710e-02
## [1768,] 0.438655970 0.1794501695 2.447048e-02
## [1769,] 0.360146521 0.0776786613 5.584740e-03
## [1770,] 0.090631399 0.0030210466 3.356718e-05
## [1771,] 0.406028666 0.1184250277 1.151354e-02
## [1772,] 0.438655970 0.1794501695 2.447048e-02
## [1773,] 0.392899701 0.1042386963 9.218388e-03
## [1774,] 0.340371253 0.0654560102 4.195898e-03
## [1775,] 0.436239133 0.2755194522 5.800410e-02
## [1776,] 0.148232688 0.4261689772 4.084119e-01
## [1777,] 0.442218287 0.1953987782 2.877966e-02
## [1778,] 0.377630828 0.0906313987 7.250512e-03
## [1779,] 0.293645732 0.0435030714 2.148300e-03
## [1780,] 0.424154946 0.3063341278 7.374710e-02
## [1781,] 0.386693968 0.3625255950 1.132892e-01
## [1782,] 0.321175019 0.4163379880 1.798991e-01
## [1783,] 0.436239133 0.2755194522 5.800410e-02
## [1784,] 0.266544426 0.0339238361 1.439193e-03
## [1785,] 0.335537578 0.4074384881 1.649156e-01
## [1786,] 0.444093854 0.2114732637 3.356718e-02
## [1787,] 0.360146521 0.0776786613 5.584740e-03
## [1788,] 0.259696720 0.4403553087 2.488965e-01
## [1789,] 0.362525595 0.3866939680 1.374912e-01
## [1790,] 0.204487093 0.0179374643 5.244873e-04
## [1791,] 0.195398778 0.4422182874 3.336033e-01
## [1792,] 0.065456010 0.3403712531 5.899768e-01
## [1793,] 0.227598100 0.4443581954 2.891855e-01
## [1794,] 0.266544426 0.0339238361 1.439193e-03
## [1795,] 0.386693968 0.3625255950 1.132892e-01
## [1796,] 0.335537578 0.4074384881 1.649156e-01
## [1797,] 0.424154946 0.3063341278 7.374710e-02
## [1798,] 0.430813836 0.2910904300 6.556091e-02
## [1799,] 0.349346279 0.3975319727 1.507880e-01
## [1800,] 0.430813836 0.2910904300 6.556091e-02
## [1801,] 0.340371253 0.0654560102 4.195898e-03
## [1802,] 0.306334128 0.4241549461 1.957638e-01
## [1803,] 0.438655970 0.1794501695 2.447048e-02
## [1804,] 0.054038972 0.3182294988 6.246727e-01
## [1805,] 0.204487093 0.0179374643 5.244873e-04
## [1806,] 0.436239133 0.2755194522 5.800410e-02
## [1807,] 0.318229499 0.0540389715 3.058810e-03
## [1808,] 0.360146521 0.0776786613 5.584740e-03
## [1809,] 0.440355309 0.2596967205 5.105149e-02
## [1810,] 0.169380014 0.0116813803 2.685375e-04
## [1811,] 0.444358195 0.2275981001 3.885821e-02
## [1812,] 0.375000000 0.3750000000 1.250000e-01
## [1813,] 0.436239133 0.2755194522 5.800410e-02
## [1814,] 0.291090430 0.4308138364 2.125348e-01
## [1815,] 0.397531973 0.3493462791 1.023338e-01
## [1816,] 0.377630828 0.0906313987 7.250512e-03
## [1817,] 0.275519452 0.4362391326 2.302373e-01
## [1818,] 0.430813836 0.2910904300 6.556091e-02
## [1819,] 0.433331375 0.1637029640 2.061445e-02
## [1820,] 0.243697761 0.4430868383 2.685375e-01
## [1821,] 0.077678661 0.3601465208 5.565901e-01
## [1822,] 0.090631399 0.3776308281 5.244873e-01
## [1823,] 0.335537578 0.4074384881 1.649156e-01
## [1824,] 0.118425028 0.4060286664 4.640328e-01
## [1825,] 0.377630828 0.0906313987 7.250512e-03
## [1826,] 0.430813836 0.2910904300 6.556091e-02
## [1827,] 0.306334128 0.4241549461 1.957638e-01
## [1828,] 0.442218287 0.1953987782 2.877966e-02
## [1829,] 0.407438488 0.3355375785 9.210835e-02
## [1830,] 0.321175019 0.4163379880 1.798991e-01
## [1831,] 0.392899701 0.1042386963 9.218388e-03
## [1832,] 0.000000000 0.0000000000 0.000000e+00
## [1833,] 0.375000000 0.3750000000 1.250000e-01
## [1834,] 0.443086838 0.2436977611 4.467792e-02
## [1835,] 0.433331375 0.1637029640 2.061445e-02
## [1836,] 0.407438488 0.3355375785 9.210835e-02
## [1837,] 0.443086838 0.2436977611 4.467792e-02
## [1838,] 0.444358195 0.2275981001 3.885821e-02
## [1839,] 0.436239133 0.2755194522 5.800410e-02
## [1840,] 0.442218287 0.1953987782 2.877966e-02
## [1841,] 0.243697761 0.4430868383 2.685375e-01
## [1842,] 0.443086838 0.2436977611 4.467792e-02
## [1843,] 0.318229499 0.0540389715 3.058810e-03
## [1844,] 0.392899701 0.1042386963 9.218388e-03
## [1845,] 0.424154946 0.3063341278 7.374710e-02
## [1846,] 0.444093854 0.2114732637 3.356718e-02
## [1847,] 0.426168977 0.1482326877 1.718640e-02
## [1848,] 0.440355309 0.2596967205 5.105149e-02
## [1849,] 0.090631399 0.0030210466 3.356718e-05
## [1850,] 0.444093854 0.2114732637 3.356718e-02
## [1851,] 0.430813836 0.2910904300 6.556091e-02
## [1852,] 0.362525595 0.3866939680 1.374912e-01
## [1853,] 0.291090430 0.4308138364 2.125348e-01
## [1854,] 0.236850055 0.0253767916 9.063140e-04
## [1855,] 0.440355309 0.2596967205 5.105149e-02
## [1856,] 0.442218287 0.1953987782 2.877966e-02
## [1857,] 0.436239133 0.2755194522 5.800410e-02
## [1858,] 0.266544426 0.0339238361 1.439193e-03
## [1859,] 0.416337988 0.3211750193 8.258786e-02
## [1860,] 0.443086838 0.2436977611 4.467792e-02
## [1861,] 0.430813836 0.2910904300 6.556091e-02
## [1862,] 0.362525595 0.3866939680 1.374912e-01
## [1863,] 0.436239133 0.2755194522 5.800410e-02
## [1864,] 0.046838810 0.0007678494 4.195898e-06
## [1865,] 0.424154946 0.3063341278 7.374710e-02
## [1866,] 0.293645732 0.0435030714 2.148300e-03
## [1867,] 0.306334128 0.4241549461 1.957638e-01
## [1868,] 0.406028666 0.1184250277 1.151354e-02
## [1869,] 0.375000000 0.3750000000 1.250000e-01
## [1870,] 0.433331375 0.1637029640 2.061445e-02
## [1871,] 0.426168977 0.1482326877 1.718640e-02
## [1872,] 0.204487093 0.0179374643 5.244873e-04
## [1873,] 0.211473264 0.4440938538 3.108657e-01
## [1874,] 0.397531973 0.3493462791 1.023338e-01
## [1875,] 0.386693968 0.3625255950 1.132892e-01
## [1876,] 0.433331375 0.1637029640 2.061445e-02
## [1877,] 0.291090430 0.4308138364 2.125348e-01
## [1878,] 0.433331375 0.1637029640 2.061445e-02
## [1879,] 0.442218287 0.1953987782 2.877966e-02
## [1880,] 0.318229499 0.0540389715 3.058810e-03
## [1881,] 0.148232688 0.4261689772 4.084119e-01
## [1882,] 0.293645732 0.0435030714 2.148300e-03
## [1883,] 0.440355309 0.2596967205 5.105149e-02
## [1884,] 0.169380014 0.0116813803 2.685375e-04
## [1885,] 0.407438488 0.3355375785 9.210835e-02
## [1886,] 0.204487093 0.0179374643 5.244873e-04
## [1887,] 0.424154946 0.3063341278 7.374710e-02
## [1888,] 0.090631399 0.0030210466 3.356718e-05
## [1889,] 0.430813836 0.2910904300 6.556091e-02
## [1890,] 0.407438488 0.3355375785 9.210835e-02
## [1891,] 0.417093250 0.1331148669 1.416116e-02
## [1892,] 0.179450170 0.4386559699 3.574234e-01
## [1893,] 0.444093854 0.2114732637 3.356718e-02
## [1894,] 0.407438488 0.3355375785 9.210835e-02
## [1895,] 0.163702964 0.4333313752 3.823512e-01
## [1896,] 0.243697761 0.4430868383 2.685375e-01
## [1897,] 0.204487093 0.0179374643 5.244873e-04
## [1898,] 0.362525595 0.3866939680 1.374912e-01
## [1899,] 0.433331375 0.1637029640 2.061445e-02
## [1900,] 0.444093854 0.2114732637 3.356718e-02
## [1901,] 0.438655970 0.1794501695 2.447048e-02
## [1902,] 0.406028666 0.1184250277 1.151354e-02
## [1903,] 0.440355309 0.2596967205 5.105149e-02
## [1904,] 0.293645732 0.0435030714 2.148300e-03
## [1905,] 0.293645732 0.0435030714 2.148300e-03
## [1906,] 0.266544426 0.0339238361 1.439193e-03
## [1907,] 0.243697761 0.4430868383 2.685375e-01
## [1908,] 0.259696720 0.4403553087 2.488965e-01
## [1909,] 0.377630828 0.0906313987 7.250512e-03
## [1910,] 0.424154946 0.3063341278 7.374710e-02
## [1911,] 0.360146521 0.0776786613 5.584740e-03
## [1912,] 0.349346279 0.3975319727 1.507880e-01
## [1913,] 0.442218287 0.1953987782 2.877966e-02
## [1914,] 0.104238696 0.3928997013 4.936432e-01
## [1915,] 0.426168977 0.1482326877 1.718640e-02
## [1916,] 0.362525595 0.3866939680 1.374912e-01
## [1917,] 0.444093854 0.2114732637 3.356718e-02
## [1918,] 0.291090430 0.4308138364 2.125348e-01
## [1919,] 0.444358195 0.2275981001 3.885821e-02
## [1920,] 0.306334128 0.4241549461 1.957638e-01
## [1921,] 0.375000000 0.3750000000 1.250000e-01
## [1922,] 0.444358195 0.2275981001 3.885821e-02
## [1923,] 0.406028666 0.1184250277 1.151354e-02
## [1924,] 0.397531973 0.3493462791 1.023338e-01
## [1925,] 0.443086838 0.2436977611 4.467792e-02
## [1926,] 0.349346279 0.3975319727 1.507880e-01
## [1927,] 0.340371253 0.0654560102 4.195898e-03
## [1928,] 0.291090430 0.4308138364 2.125348e-01
## [1929,] 0.424154946 0.3063341278 7.374710e-02
## [1930,] 0.377630828 0.0906313987 7.250512e-03
## [1931,] 0.443086838 0.2436977611 4.467792e-02
## [1932,] 0.375000000 0.3750000000 1.250000e-01
## [1933,] 0.430813836 0.2910904300 6.556091e-02
## [1934,] 0.424154946 0.3063341278 7.374710e-02
## [1935,] 0.406028666 0.1184250277 1.151354e-02
## [1936,] 0.426168977 0.1482326877 1.718640e-02
## [1937,] 0.438655970 0.1794501695 2.447048e-02
## [1938,] 0.349346279 0.3975319727 1.507880e-01
## [1939,] 0.211473264 0.4440938538 3.108657e-01
## [1940,] 0.438655970 0.1794501695 2.447048e-02
## [1941,] 0.440355309 0.2596967205 5.105149e-02
## [1942,] 0.275519452 0.4362391326 2.302373e-01
## [1943,] 0.424154946 0.3063341278 7.374710e-02
## [1944,] 0.416337988 0.3211750193 8.258786e-02
## [1945,] 0.266544426 0.0339238361 1.439193e-03
## [1946,] 0.335537578 0.4074384881 1.649156e-01
## [1947,] 0.377630828 0.0906313987 7.250512e-03
## [1948,] 0.360146521 0.0776786613 5.584740e-03
## [1949,] 0.204487093 0.0179374643 5.244873e-04
## [1950,] 0.386693968 0.3625255950 1.132892e-01
## [1951,] 0.424154946 0.3063341278 7.374710e-02
## [1952,] 0.349346279 0.3975319727 1.507880e-01
## [1953,] 0.438655970 0.1794501695 2.447048e-02
## [1954,] 0.204487093 0.0179374643 5.244873e-04
## [1955,] 0.349346279 0.3975319727 1.507880e-01
## [1956,] 0.397531973 0.3493462791 1.023338e-01
## [1957,] 0.426168977 0.1482326877 1.718640e-02
## [1958,] 0.426168977 0.1482326877 1.718640e-02
## [1959,] 0.430813836 0.2910904300 6.556091e-02
## [1960,] 0.430813836 0.2910904300 6.556091e-02
## [1961,] 0.227598100 0.4443581954 2.891855e-01
## [1962,] 0.321175019 0.4163379880 1.798991e-01
## [1963,] 0.090631399 0.0030210466 3.356718e-05
## [1964,] 0.443086838 0.2436977611 4.467792e-02
## [1965,] 0.386693968 0.3625255950 1.132892e-01
## [1966,] 0.430813836 0.2910904300 6.556091e-02
## [1967,] 0.275519452 0.4362391326 2.302373e-01
## [1968,] 0.291090430 0.4308138364 2.125348e-01
## [1969,] 0.444093854 0.2114732637 3.356718e-02
## [1970,] 0.335537578 0.4074384881 1.649156e-01
## [1971,] 0.443086838 0.2436977611 4.467792e-02
## [1972,] 0.360146521 0.0776786613 5.584740e-03
## [1973,] 0.444358195 0.2275981001 3.885821e-02
## [1974,] 0.362525595 0.3866939680 1.374912e-01
## [1975,] 0.362525595 0.3866939680 1.374912e-01
## [1976,] 0.259696720 0.4403553087 2.488965e-01
## [1977,] 0.377630828 0.0906313987 7.250512e-03
## [1978,] 0.275519452 0.4362391326 2.302373e-01
## [1979,] 0.104238696 0.3928997013 4.936432e-01
## [1980,] 0.349346279 0.3975319727 1.507880e-01
## [1981,] 0.416337988 0.3211750193 8.258786e-02
## [1982,] 0.306334128 0.4241549461 1.957638e-01
## [1983,] 0.204487093 0.0179374643 5.244873e-04
## [1984,] 0.025376792 0.2368500554 7.368668e-01
## [1985,] 0.442218287 0.1953987782 2.877966e-02
## [1986,] 0.291090430 0.4308138364 2.125348e-01
## [1987,] 0.266544426 0.0339238361 1.439193e-03
## [1988,] 0.118425028 0.4060286664 4.640328e-01
## [1989,] 0.163702964 0.4333313752 3.823512e-01
## [1990,] 0.424154946 0.3063341278 7.374710e-02
## [1991,] 0.406028666 0.1184250277 1.151354e-02
## [1992,] 0.430813836 0.2910904300 6.556091e-02
## [1993,] 0.442218287 0.1953987782 2.877966e-02
## [1994,] 0.293645732 0.0435030714 2.148300e-03
## [1995,] 0.444358195 0.2275981001 3.885821e-02
## [1996,] 0.416337988 0.3211750193 8.258786e-02
## [1997,] 0.443086838 0.2436977611 4.467792e-02
## [1998,] 0.349346279 0.3975319727 1.507880e-01
## [1999,] 0.430813836 0.2910904300 6.556091e-02
## [2000,] 0.335537578 0.4074384881 1.649156e-01
## [2001,] 0.362525595 0.3866939680 1.374912e-01
## [2002,] 0.306334128 0.4241549461 1.957638e-01
## [2003,] 0.340371253 0.0654560102 4.195898e-03
## [2004,] 0.340371253 0.0654560102 4.195898e-03
## [2005,] 0.293645732 0.0435030714 2.148300e-03
## [2006,] 0.416337988 0.3211750193 8.258786e-02
## [2007,] 0.033923836 0.2665444262 6.980925e-01
## [2008,] 0.392899701 0.1042386963 9.218388e-03
## [2009,] 0.443086838 0.2436977611 4.467792e-02
## [2010,] 0.444093854 0.2114732637 3.356718e-02
## [2011,] 0.436239133 0.2755194522 5.800410e-02
## [2012,] 0.362525595 0.3866939680 1.374912e-01
## [2013,] 0.349346279 0.3975319727 1.507880e-01
## [2014,] 0.443086838 0.2436977611 4.467792e-02
## [2015,] 0.266544426 0.0339238361 1.439193e-03
## [2016,] 0.397531973 0.3493462791 1.023338e-01
## [2017,] 0.104238696 0.3928997013 4.936432e-01
## [2018,] 0.424154946 0.3063341278 7.374710e-02
## [2019,] 0.417093250 0.1331148669 1.416116e-02
## [2020,] 0.360146521 0.0776786613 5.584740e-03
## [2021,] 0.318229499 0.0540389715 3.058810e-03
## [2022,] 0.443086838 0.2436977611 4.467792e-02
## [2023,] 0.438655970 0.1794501695 2.447048e-02
## [2024,] 0.386693968 0.3625255950 1.132892e-01
## [2025,] 0.321175019 0.4163379880 1.798991e-01
## [2026,] 0.444093854 0.2114732637 3.356718e-02
## [2027,] 0.065456010 0.3403712531 5.899768e-01
## [2028,] 0.236850055 0.0253767916 9.063140e-04
## [2029,] 0.169380014 0.0116813803 2.685375e-04
## [2030,] 0.360146521 0.0776786613 5.584740e-03
## [2031,] 0.444093854 0.2114732637 3.356718e-02
## [2032,] 0.054038972 0.3182294988 6.246727e-01
## [2033,] 0.406028666 0.1184250277 1.151354e-02
## [2034,] 0.406028666 0.1184250277 1.151354e-02
## [2035,] 0.417093250 0.1331148669 1.416116e-02
## [2036,] 0.438655970 0.1794501695 2.447048e-02
## [2037,] 0.407438488 0.3355375785 9.210835e-02
## [2038,] 0.227598100 0.4443581954 2.891855e-01
## [2039,] 0.377630828 0.0906313987 7.250512e-03
## [2040,] 0.306334128 0.4241549461 1.957638e-01
## [2041,] 0.392899701 0.1042386963 9.218388e-03
## [2042,] 0.426168977 0.1482326877 1.718640e-02
## [2043,] 0.397531973 0.3493462791 1.023338e-01
## [2044,] 0.360146521 0.0776786613 5.584740e-03
## [2045,] 0.243697761 0.4430868383 2.685375e-01
## [2046,] 0.440355309 0.2596967205 5.105149e-02
## [2047,] 0.275519452 0.4362391326 2.302373e-01
## [2048,] 0.335537578 0.4074384881 1.649156e-01
## [2049,] 0.321175019 0.4163379880 1.798991e-01
## [2050,] 0.442218287 0.1953987782 2.877966e-02
## [2051,] 0.433331375 0.1637029640 2.061445e-02
## [2052,] 0.443086838 0.2436977611 4.467792e-02
## [2053,] 0.306334128 0.4241549461 1.957638e-01
## [2054,] 0.442218287 0.1953987782 2.877966e-02
## [2055,] 0.444358195 0.2275981001 3.885821e-02
## [2056,] 0.397531973 0.3493462791 1.023338e-01
## [2057,] 0.349346279 0.3975319727 1.507880e-01
## [2058,] 0.397531973 0.3493462791 1.023338e-01
## [2059,] 0.340371253 0.0654560102 4.195898e-03
## [2060,] 0.133114867 0.4170932496 4.356307e-01
## [2061,] 0.436239133 0.2755194522 5.800410e-02
## [2062,] 0.243697761 0.4430868383 2.685375e-01
## [2063,] 0.375000000 0.3750000000 1.250000e-01
## [2064,] 0.424154946 0.3063341278 7.374710e-02
## [2065,] 0.386693968 0.3625255950 1.132892e-01
## [2066,] 0.436239133 0.2755194522 5.800410e-02
## [2067,] 0.377630828 0.0906313987 7.250512e-03
## [2068,] 0.392899701 0.1042386963 9.218388e-03
## [2069,] 0.360146521 0.0776786613 5.584740e-03
## [2070,] 0.442218287 0.1953987782 2.877966e-02
## [2071,] 0.275519452 0.4362391326 2.302373e-01
## [2072,] 0.424154946 0.3063341278 7.374710e-02
## [2073,] 0.266544426 0.0339238361 1.439193e-03
## [2074,] 0.392899701 0.1042386963 9.218388e-03
## [2075,] 0.349346279 0.3975319727 1.507880e-01
## [2076,] 0.266544426 0.0339238361 1.439193e-03
## [2077,] 0.362525595 0.3866939680 1.374912e-01
## [2078,] 0.377630828 0.0906313987 7.250512e-03
## [2079,] 0.443086838 0.2436977611 4.467792e-02
## [2080,] 0.426168977 0.1482326877 1.718640e-02
## [2081,] 0.436239133 0.2755194522 5.800410e-02
## [2082,] 0.377630828 0.0906313987 7.250512e-03
## [2083,] 0.293645732 0.0435030714 2.148300e-03
## [2084,] 0.360146521 0.0776786613 5.584740e-03
## [2085,] 0.306334128 0.4241549461 1.957638e-01
## [2086,] 0.349346279 0.3975319727 1.507880e-01
## [2087,] 0.375000000 0.3750000000 1.250000e-01
## [2088,] 0.321175019 0.4163379880 1.798991e-01
## [2089,] 0.443086838 0.2436977611 4.467792e-02
## [2090,] 0.335537578 0.4074384881 1.649156e-01
## [2091,] 0.275519452 0.4362391326 2.302373e-01
## [2092,] 0.377630828 0.0906313987 7.250512e-03
## [2093,] 0.349346279 0.3975319727 1.507880e-01
## [2094,] 0.406028666 0.1184250277 1.151354e-02
## [2095,] 0.362525595 0.3866939680 1.374912e-01
## [2096,] 0.293645732 0.0435030714 2.148300e-03
## [2097,] 0.392899701 0.1042386963 9.218388e-03
## [2098,] 0.392899701 0.1042386963 9.218388e-03
## [2099,] 0.424154946 0.3063341278 7.374710e-02
## [2100,] 0.377630828 0.0906313987 7.250512e-03
## [2101,] 0.318229499 0.0540389715 3.058810e-03
## [2102,] 0.291090430 0.4308138364 2.125348e-01
## attr(,"degree")
## [1] 3
## attr(,"knots")
## numeric(0)
## attr(,"Boundary.knots")
## [1] 18 80
## attr(,"intercept")
## [1] FALSE
## attr(,"class")
## [1] "bs" "basis" "matrix"
\[ X = 0.71 \times {\rm num415} + 0.71 \times {\rm num857}\]
\[ Y = 0.71 \times {\rm num415} - 0.71 \times {\rm num857}\]
# Hand-rolled 45-degree rotation of the two correlated predictors:
# the sum direction captures shared variation, the difference the contrast.
X <- 0.71 * training$num415 + 0.71 * training$num857
Y <- 0.71 * training$num415 - 0.71 * training$num857
plot(X, Y)
# PCA on just the two correlated predictors (columns 34 and 32:
# num415 and num857) to see the rotation prcomp() finds automatically.
smallSpam <- spam[, c(34, 32)]
prComp <- prcomp(smallSpam)
# PC1 ~ the sum direction, PC2 ~ the difference direction
plot(prComp$x[, 1], prComp$x[, 2])
prComp$rotation
## PC1 PC2
## num415 0.7080625 0.7061498
## num857 0.7061498 -0.7080625
# Colour code the points: 1 (black) for ham, 2 (red) for spam
typeColor <- ifelse(spam$type == "spam", 2, 1)
# log10(x + 1) tames the heavy right skew of the word frequencies before PCA
prComp <- prcomp(log10(spam[, -58] + 1))
plot(prComp$x[, 1], prComp$x[, 2], col = typeColor, xlab = "PC1", ylab = "PC2")
# Same idea via caret: keep only the first two principal components
preProc <- preProcess(log10(spam[, -58] + 1), method = "pca", pcaComp = 2)
spamPC <- predict(preProc, log10(spam[, -58] + 1))
plot(spamPC[, 1], spamPC[, 2], col = typeColor)
# Fit the PCA rotation on the TRAINING set only, then apply it to both sets
preProc <- preProcess(log10(training[,-58] + 1), method = "pca", pcaComp = 2)
trainPC <- predict(preProc, log10(training[,-58] + 1))
# Use the x/y interface: the original formula `training$type ~ .` fails in
# recent caret versions because `type` is not a column of trainPC.
modelFit <- train(x = trainPC, y = training$type, method = "glm")
# Apply the SAME training-set rotation to the test set before predicting
testPC <- predict(preProc, log10(testing[,-58] + 1))
confusionMatrix(testing$type, predict(modelFit, testPC))
## Confusion Matrix and Statistics
##
## Reference
## Prediction nonspam spam
## nonspam 645 52
## spam 73 380
##
## Accuracy : 0.8913
## 95% CI : (0.8719, 0.9087)
## No Information Rate : 0.6243
## P-Value [Acc > NIR] : < 2e-16
##
## Kappa : 0.7705
## Mcnemar's Test P-Value : 0.07364
##
## Sensitivity : 0.8983
## Specificity : 0.8796
## Pos Pred Value : 0.9254
## Neg Pred Value : 0.8389
## Prevalence : 0.6243
## Detection Rate : 0.5609
## Detection Prevalence : 0.6061
## Balanced Accuracy : 0.8890
##
## 'Positive' Class : nonspam
##
# Let caret run the PCA pre-processing internally.
# NOTE: the original used `training$type ~ .`, which leaks the outcome —
# because the LHS is not the bare column name, `.` still expands to include
# the `type` column as a predictor.  `type ~ .` excludes it correctly.
modelFit <- train(type ~ ., method = "glm", preProcess = "pca", data = training)
confusionMatrix(testing$type, predict(modelFit, testing))
## Confusion Matrix and Statistics
##
## Reference
## Prediction nonspam spam
## nonspam 658 39
## spam 50 403
##
## Accuracy : 0.9226
## 95% CI : (0.9056, 0.9374)
## No Information Rate : 0.6157
## P-Value [Acc > NIR] : <2e-16
##
## Kappa : 0.8372
## Mcnemar's Test P-Value : 0.2891
##
## Sensitivity : 0.9294
## Specificity : 0.9118
## Pos Pred Value : 0.9440
## Neg Pred Value : 0.8896
## Prevalence : 0.6157
## Detection Rate : 0.5722
## Detection Prevalence : 0.6061
## Balanced Accuracy : 0.9206
##
## 'Positive' Class : nonspam
##
Pros: * Easy to implement * Easy to interpret
Cons: * Often poor performance in nonlinear settings
# Reproducible 50/50 split of the Old Faithful data into train/test halves
library(caret); data(faithful); set.seed(333)
inTrain <- createDataPartition(y = faithful$waiting, p = 0.5, list = FALSE)
trainFaith <- faithful[inTrain, ]
testFaith <- faithful[-inTrain, ]
head(trainFaith)
## eruptions waiting
## 1 3.600 79
## 3 3.333 74
## 5 4.533 85
## 6 2.883 55
## 7 4.700 88
## 8 3.600 85
plot(trainFaith$waiting,trainFaith$eruptions,pch=19,col="blue",xlab="Waiting",ylab="Duration")
\[ ED_i = b_0 + b_1 WT_i + e_i \]
# Simple linear regression: duration as a function of waiting time,
# fitted on the training half only
lm1 <- lm(eruptions ~ waiting, data = trainFaith)
summary(lm1)
##
## Call:
## lm(formula = eruptions ~ waiting, data = trainFaith)
##
## Residuals:
## Min 1Q Median 3Q Max
## -1.26990 -0.34789 0.03979 0.36589 1.05020
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -1.792739 0.227869 -7.867 1.04e-12 ***
## waiting 0.073901 0.003148 23.474 < 2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.495 on 135 degrees of freedom
## Multiple R-squared: 0.8032, Adjusted R-squared: 0.8018
## F-statistic: 551 on 1 and 135 DF, p-value: < 2.2e-16
plot(trainFaith$waiting, trainFaith$eruptions, pch = 19, col = "blue",
     xlab = "Waiting", ylab = "Duration")
# Sort by waiting time so the fitted line is drawn left-to-right instead of
# zig-zagging in data-row order; fitted() is the accessor for lm1$fitted.values.
ord <- order(trainFaith$waiting)
lines(trainFaith$waiting[ord], fitted(lm1)[ord], lwd = 3)
\[\hat{ED} = \hat{b}_0 + \hat{b}_1 WT\]
coef(lm1)[1] + coef(lm1)[2]*80
## (Intercept)
## 4.119307
# The same prediction via predict() on a one-row data frame
newdata <- data.frame(waiting = 80)
predict(lm1, newdata)
## 1
## 4.119307
par(mfrow = c(1, 2))
# Left panel: training data with the fitted line.  Sort by waiting time so
# the line is drawn monotonically instead of zig-zagging in row order.
plot(trainFaith$waiting, trainFaith$eruptions, pch = 19, col = "blue",
     xlab = "Waiting", ylab = "Duration")
ordTrain <- order(trainFaith$waiting)
lines(trainFaith$waiting[ordTrain], predict(lm1)[ordTrain], lwd = 3)
# Right panel: held-out test data with predictions from the training model
plot(testFaith$waiting, testFaith$eruptions, pch = 19, col = "blue",
     xlab = "Waiting", ylab = "Duration")
ordTest <- order(testFaith$waiting)
lines(testFaith$waiting[ordTest], predict(lm1, newdata = testFaith)[ordTest],
      lwd = 3)
# Root sum of squared errors on the training set.
# NOTE(review): despite the original "RMSE" label this is sqrt(SSE), not a
# true RMSE — RMSE would divide by n first: sqrt(mean((fitted - observed)^2)).
sqrt(sum((lm1$fitted-trainFaith$eruptions)^2))
## [1] 5.75186
# Same quantity on the held-out test set (same caveat as above);
# the error is larger out of sample, as expected.
sqrt(sum((predict(lm1,newdata=testFaith)-testFaith$eruptions)^2))
## [1] 5.838559
# 95% prediction intervals on the test set, drawn in increasing-x order
pred1 <- predict(lm1, newdata = testFaith, interval = "prediction")
ord <- order(testFaith$waiting)
plot(testFaith$waiting, testFaith$eruptions, pch = 19, col = "blue")
matlines(testFaith$waiting[ord], pred1[ord, ], type = "l",
         col = c(1, 2, 2), lty = c(1, 1, 1), lwd = 3)
# The same linear model, fitted through caret's train() wrapper
modFit <- train(eruptions ~ waiting, data = trainFaith, method = "lm")
summary(modFit$finalModel)
##
## Call:
## lm(formula = .outcome ~ ., data = dat)
##
## Residuals:
## Min 1Q Median 3Q Max
## -1.26990 -0.34789 0.03979 0.36589 1.05020
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -1.792739 0.227869 -7.867 1.04e-12 ***
## waiting 0.073901 0.003148 23.474 < 2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.495 on 135 degrees of freedom
## Multiple R-squared: 0.8032, Adjusted R-squared: 0.8018
## F-statistic: 551 on 1 and 135 DF, p-value: < 2.2e-16
Data from the ISLR package, which accompanies the book *An Introduction to Statistical Learning*.
library(ISLR); library(ggplot2); library(caret);
# Drop logwage: it is just a transform of the outcome we want to predict
data(Wage)
Wage <- subset(Wage, select = -c(logwage))
summary(Wage)
## year age sex maritl
## Min. :2003 Min. :18.00 1. Male :3000 1. Never Married: 648
## 1st Qu.:2004 1st Qu.:33.75 2. Female: 0 2. Married :2074
## Median :2006 Median :42.00 3. Widowed : 19
## Mean :2006 Mean :42.41 4. Divorced : 204
## 3rd Qu.:2008 3rd Qu.:51.00 5. Separated : 55
## Max. :2009 Max. :80.00
##
## race education region
## 1. White:2480 1. < HS Grad :268 2. Middle Atlantic :3000
## 2. Black: 293 2. HS Grad :971 1. New England : 0
## 3. Asian: 190 3. Some College :650 3. East North Central: 0
## 4. Other: 37 4. College Grad :685 4. West North Central: 0
## 5. Advanced Degree:426 5. South Atlantic : 0
## 6. East South Central: 0
## (Other) : 0
## jobclass health health_ins
## 1. Industrial :1544 1. <=Good : 858 1. Yes:2083
## 2. Information:1456 2. >=Very Good:2142 2. No : 917
##
##
##
##
##
## wage
## Min. : 20.09
## 1st Qu.: 85.38
## Median :104.92
## Mean :111.70
## 3rd Qu.:128.68
## Max. :318.34
##
# 70/30 train/test split stratified on the outcome (wage)
inTrain <- createDataPartition(y=Wage$wage,
p=0.7, list=FALSE)
training <- Wage[inTrain,]; testing <- Wage[-inTrain,]
dim(training); dim(testing)
## [1] 2102 11
## [1] 898 11
# Exploratory plots: pairwise feature plot, then wage vs age colored by
# jobclass and by education
featurePlot(x=training[,c("age","education","jobclass")],
y = training$wage,
plot="pairs")
qplot(age,wage,data=training)
qplot(age,wage,colour=jobclass,data=training)
qplot(age,wage,colour=education,data=training)
\[ ED_i = b_0 + b_1 age + b_2 I(Jobclass_i="Information") + \sum_{k=1}^4 \gamma_k I(education_i = \text{level } k) \]
# Linear model of wage on age, jobclass and education; the factor covariates
# are expanded into indicator variables automatically
modFit<- train(wage ~ age + jobclass + education,
method = "lm",data=training)
finMod <- modFit$finalModel
print(modFit)
## Linear Regression
##
## 2102 samples
## 10 predictors
##
## No pre-processing
## Resampling: Bootstrapped (25 reps)
## Summary of sample sizes: 2102, 2102, 2102, 2102, 2102, 2102, ...
## Resampling results:
##
## RMSE Rsquared
## 36.06666 0.2517055
##
##
Education levels: 1 = HS Grad, 2 = Some College, 3 = College Grad, 4 = Advanced Degree
# Residual diagnostics: residuals-vs-fitted plot from the lm object
plot(finMod,1,pch=19,cex=0.5,col="#00000010")
# Color residuals by a variable NOT in the model (race) to spot missed structure
qplot(finMod$fitted,finMod$residuals,colour=race,data=training)
# Residuals vs row index: a trend here suggests a variable related to row order is missing
plot(finMod$residuals,pch=19)
# Predicted vs observed wage on the held-out test set
pred <- predict(modFit, testing)
qplot(wage,pred,colour=year,data=testing)
# Refit using all covariates for comparison
modFitAll<- train(wage ~ .,data=training,method="lm")
pred <- predict(modFitAll, testing)
qplot(wage,pred,data=testing)
Pros:
Cons:
\[\hat{p}_{mk} = \frac{1}{N_m}\sum_{x_i\; in \; Leaf \; m}\mathbb{1}(y_i = k)\]
Misclassification Error: \[ 1 - \hat{p}_{m k(m)}, \quad k(m) = \text{most common } k\] * 0 = perfect purity * 0.5 = no purity
Gini index: \[ \sum_{k \neq k'} \hat{p}_{mk} \times \hat{p}_{mk'} = \sum_{k=1}^K \hat{p}_{mk}(1-\hat{p}_{mk}) = 1 - \sum_{k=1}^K p_{mk}^2\]
http://en.wikipedia.org/wiki/Decision_tree_learning
Deviance/information gain:
\[ -\sum_{k=1}^K \hat{p}_{mk} \log_2\hat{p}_{mk} \] * 0 = perfect purity * 1 = no purity
http://en.wikipedia.org/wiki/Decision_tree_learning
— &twocol w1:50% w2:50%
*** =left
*** =right
# Classification tree example on iris
data(iris); library(ggplot2)
names(iris)
## [1] "Sepal.Length" "Sepal.Width" "Petal.Length" "Petal.Width"
## [5] "Species"
table(iris$Species)
##
## setosa versicolor virginica
## 50 50 50
library(caret)
# Stratified 70/30 split on Species
inTrain <- createDataPartition(y=iris$Species,
p=0.7, list=FALSE)
training <- iris[inTrain,]
testing <- iris[-inTrain,]
dim(training); dim(testing)
## [1] 105 5
## [1] 45 5
library(ggplot2)
qplot(Petal.Width,Sepal.Width,colour=Species,data=training)
library(caret)
# CART tree via rpart; caret tunes the complexity parameter cp by resampling
modFit <- train(Species ~ .,method="rpart",data=training)
print(modFit$finalModel)
## n= 105
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 105 70 setosa (0.33333333 0.33333333 0.33333333)
## 2) Petal.Length< 2.45 35 0 setosa (1.00000000 0.00000000 0.00000000) *
## 3) Petal.Length>=2.45 70 35 versicolor (0.00000000 0.50000000 0.50000000)
## 6) Petal.Width< 1.65 34 1 versicolor (0.00000000 0.97058824 0.02941176) *
## 7) Petal.Width>=1.65 36 2 virginica (0.00000000 0.05555556 0.94444444) *
# Base-graphics dendrogram of the fitted tree
plot(modFit$finalModel, uniform=TRUE,
main="Classification Tree")
text(modFit$finalModel, use.n=TRUE, all=TRUE, cex=.8)
# Prettier rendering of the same tree (requires the rattle package)
library(rattle)
fancyRpartPlot(modFit$finalModel)
# Predicted classes for the held-out test set
predict(modFit,newdata=testing)
## [1] setosa setosa setosa setosa setosa setosa
## [7] setosa setosa setosa setosa setosa setosa
## [13] setosa setosa setosa versicolor versicolor versicolor
## [19] versicolor versicolor versicolor versicolor versicolor versicolor
## [25] versicolor versicolor versicolor versicolor versicolor versicolor
## [31] virginica virginica virginica virginica virginica virginica
## [37] versicolor virginica virginica versicolor versicolor virginica
## [43] virginica virginica virginica
## Levels: setosa versicolor virginica
Basic idea:
Notes:
# Bagging example: ozone data, sorted by ozone level for plotting
library(ElemStatLearn); data(ozone,package="ElemStatLearn")
##
## Attaching package: 'ElemStatLearn'
## The following object is masked _by_ '.GlobalEnv':
##
## spam
ozone <- ozone[order(ozone$ozone),]
head(ozone)
## ozone radiation temperature wind
## 17 1 8 59 9.7
## 19 4 25 61 9.7
## 14 6 78 57 18.4
## 45 7 48 80 14.3
## 106 7 49 69 10.3
## 7 8 19 61 20.1
http://en.wikipedia.org/wiki/Bootstrap_aggregating
# Manual bagging of a loess smoother: 10 bootstrap resamples, each fitted
# model evaluated on the grid ozone = 1..155
ll <- matrix(NA,nrow=10,ncol=155)
for(i in 1:10){
# resample rows with replacement, then re-sort by ozone
ss <- sample(1:dim(ozone)[1],replace=T)
ozone0 <- ozone[ss,]; ozone0 <- ozone0[order(ozone0$ozone),]
loess0 <- loess(temperature ~ ozone,data=ozone0,span=0.2)
ll[i,] <- predict(loess0,newdata=data.frame(ozone=1:155))
}
# Grey: individual bootstrap fits; red: their pointwise average (the bagged fit)
plot(ozone$ozone,ozone$temperature,pch=19,cex=0.5)
for(i in 1:10){lines(1:155,ll[i,],col="grey",lwd=2)}
lines(1:155,apply(ll,2,mean),col="red",lwd=2)
In caret's train function, consider the bagging method options: bagEarth, treebag, and bagFDA. Alternatively, bag any model manually with the bag function:
library(caret)
# Bagging via caret's bag(): B = 10 conditional-inference trees, using the
# prepackaged ctreeBag fit/predict/aggregate functions
predictors = data.frame(ozone=ozone$ozone)
temperature = ozone$temperature
treebag <- bag(predictors, temperature, B = 10,
bagControl = bagControl(fit = ctreeBag$fit,
predict = ctreeBag$pred,
aggregate = ctreeBag$aggregate))
## Warning: executing %dopar% sequentially: no parallel backend registered
http://www.inside-r.org/packages/cran/caret/docs/nbBag
# Grey: data; red: a single tree's predictions; blue: the bagged (aggregated) predictions
plot(ozone$ozone,temperature,col='lightgrey',pch=19)
points(ozone$ozone,predict(treebag$fits[[1]]$fit,predictors),pch=19,col="red")
points(ozone$ozone,predict(treebag,predictors),pch=19,col="blue")
# Inspect the fitting function applied to each bootstrap sample (party::ctree)
ctreeBag$fit
## function (x, y, ...)
## {
## loadNamespace("party")
## data <- as.data.frame(x)
## data$y <- y
## party::ctree(y ~ ., data = data)
## }
## <environment: namespace:caret>
# Inspect the per-model prediction function (class probabilities or raw response)
ctreeBag$pred
## function (object, x)
## {
## if (!is.data.frame(x))
## x <- as.data.frame(x)
## obsLevels <- levels(object@data@get("response")[, 1])
## if (!is.null(obsLevels)) {
## rawProbs <- party::treeresponse(object, x)
## probMatrix <- matrix(unlist(rawProbs), ncol = length(obsLevels),
## byrow = TRUE)
## out <- data.frame(probMatrix)
## colnames(out) <- obsLevels
## rownames(out) <- NULL
## }
## else out <- unlist(party::treeresponse(object, x))
## out
## }
## <environment: namespace:caret>
# Inspect the aggregation function (median across models; most-probable class
# for classification output)
ctreeBag$aggregate
## function (x, type = "class")
## {
## if (is.matrix(x[[1]]) | is.data.frame(x[[1]])) {
## pooled <- x[[1]] & NA
## classes <- colnames(pooled)
## for (i in 1:ncol(pooled)) {
## tmp <- lapply(x, function(y, col) y[, col], col = i)
## tmp <- do.call("rbind", tmp)
## pooled[, i] <- apply(tmp, 2, median)
## }
## if (type == "class") {
## out <- factor(classes[apply(pooled, 1, which.max)],
## levels = classes)
## }
## else out <- as.data.frame(pooled)
## }
## else {
## x <- matrix(unlist(x), ncol = length(x))
## out <- apply(x, 1, median)
## }
## out
## }
## <environment: namespace:caret>
Notes:
Further resources:
Pros:
Cons:
# Random forest on iris
data(iris); library(ggplot2); library(caret)
inTrain <- createDataPartition(y=iris$Species,
p=0.7, list=FALSE)
training <- iris[inTrain,]
testing <- iris[-inTrain,]
library(caret)
library(randomForest)
# prox=TRUE stores the proximity matrix (needed by classCenter below);
# caret tunes mtry by bootstrap resampling
modFit <- train(Species~ .,data=training,method="rf",prox=TRUE)
modFit
## Random Forest
##
## 105 samples
## 4 predictors
## 3 classes: 'setosa', 'versicolor', 'virginica'
##
## No pre-processing
## Resampling: Bootstrapped (25 reps)
## Summary of sample sizes: 105, 105, 105, 105, 105, 105, ...
## Resampling results across tuning parameters:
##
## mtry Accuracy Kappa
## 2 0.9377826 0.9058122
## 3 0.9378352 0.9058753
## 4 0.9327083 0.8981613
##
## Accuracy was used to select the optimal model using the largest value.
## The final value used for the model was mtry = 3.
library(randomForest)
# Show the split structure of the 2nd tree in the fitted forest
getTree(modFit$finalModel,k=2)
## left daughter right daughter split var split point status prediction
## 1 2 3 4 0.80 1 0
## 2 0 0 0 0.00 -1 1
## 3 4 5 4 1.75 1 0
## 4 6 7 3 5.05 1 0
## 5 8 9 3 5.00 1 0
## 6 0 0 0 0.00 -1 2
## 7 0 0 0 0.00 -1 3
## 8 10 11 1 5.95 1 0
## 9 0 0 0 0.00 -1 3
## 10 0 0 0 0.00 -1 2
## 11 0 0 0 0.00 -1 3
# Class "centers" in Petal.Length/Petal.Width space, computed from the
# proximity matrix, overlaid as X marks on the training scatterplot
irisP <- classCenter(training[,c(3,4)], training$Species, modFit$finalModel$prox)
irisP <- as.data.frame(irisP); irisP$Species <- rownames(irisP)
p <- qplot(Petal.Width, Petal.Length, col=Species,data=training)
p + geom_point(aes(x=Petal.Width,y=Petal.Length,col=Species),size=5,shape=4,data=irisP)
# Test-set predictions and confusion table
pred <- predict(modFit,testing); testing$predRight <- pred==testing$Species
table(pred,testing$Species)
##
## pred setosa versicolor virginica
## setosa 15 0 0
## versicolor 0 15 0
## virginica 0 0 15
# Highlight which test points were misclassified
qplot(Petal.Width,Petal.Length,colour=predRight,data=testing,main="newdata Predictions")
Notes:
Further resources:
http://webee.technion.ac.il/people/rmeir/BoostingTutorial.pdf
Pros:
Cons:
Our goal is to build parametric model for conditional distribution \(P(Y = k | X = x)\)
A typical approach is to apply Bayes theorem: \[ Pr(Y = k | X=x) = \frac{Pr(X=x|Y=k)Pr(Y=k)}{\sum_{\ell=1}^K Pr(X=x |Y = \ell) Pr(Y=\ell)}\] \[Pr(Y = k | X=x) = \frac{f_k(x) \pi_k}{\sum_{\ell = 1}^K f_{\ell}(x) \pi_{\ell}}\]
Typically prior probabilities \(\pi_k\) are set in advance.
A common choice for \(f_k(x) = \frac{1}{\sigma_k \sqrt{2 \pi}}e^{-\frac{(x-\mu_k)^2}{\sigma_k^2}}\), a Gaussian distribution
Estimate the parameters (\(\mu_k\),\(\sigma_k^2\)) from the data.
Classify to the class with the highest value of \(P(Y = k | X = x)\)
A range of models use this approach
http://statweb.stanford.edu/~tibs/ElemStatLearn/
\[log \frac{Pr(Y = k | X=x)}{Pr(Y = j | X=x)}\] \[ = log \frac{f_k(x)}{f_j(x)} + log \frac{\pi_k}{\pi_j}\] \[ = log \frac{\pi_k}{\pi_j} - \frac{1}{2}(\mu_k + \mu_j)^T \Sigma^{-1}(\mu_k - \mu_j)\] \[ + x^T \Sigma^{-1} (\mu_k - \mu_j)\]
http://statweb.stanford.edu/~tibs/ElemStatLearn/
\[\delta_k(x) = x^T \Sigma^{-1} \mu_k - \frac{1}{2}\mu_k^T \Sigma^{-1}\mu_k + log(\pi_k)\]
Suppose we have many predictors, we would want to model: \(P(Y = k | X_1,\ldots,X_m)\)
We could use Bayes Theorem to get:
\[P(Y = k | X_1,\ldots,X_m) = \frac{\pi_k P(X_1,\ldots,X_m| Y=k)}{\sum_{\ell = 1}^K P(X_1,\ldots,X_m | Y=\ell) \pi_{\ell}}\] \[ \propto \pi_k P(X_1,\ldots,X_m| Y=k)\]
This can be written:
\[P(X_1,\ldots,X_m, Y=k) = \pi_k P(X_1 | Y = k)P(X_2,\ldots,X_m | X_1,Y=k)\] \[ = \pi_k P(X_1 | Y = k) P(X_2 | X_1, Y=k) P(X_3,\ldots,X_m | X_1,X_2, Y=k)\] \[ = \pi_k P(X_1 | Y = k) P(X_2 | X_1, Y=k)\ldots P(X_m|X_1\ldots,X_{m-1},Y=k)\]
We could make an assumption to write this:
\[ \approx \pi_k P(X_1 | Y = k) P(X_2 | Y = k)\ldots P(X_m | Y=k)\]
# Model-based prediction: LDA vs naive Bayes on iris
data(iris); library(ggplot2)
names(iris)
## [1] "Sepal.Length" "Sepal.Width" "Petal.Length" "Petal.Width"
## [5] "Species"
table(iris$Species)
##
## setosa versicolor virginica
## 50 50 50
library(caret)
inTrain <- createDataPartition(y=iris$Species,
p=0.7, list=FALSE)
training <- iris[inTrain,]
testing <- iris[-inTrain,]
dim(training); dim(testing)
## [1] 105 5
## [1] 45 5
library(klaR); library(MASS)
## Loading required package: MASS
# Fit linear discriminant analysis and naive Bayes, then cross-tabulate
# their test-set predictions against each other
modlda = train(Species ~ .,data=training,method="lda")
modnb = train(Species ~ ., data=training,method="nb")
plda = predict(modlda,testing); pnb = predict(modnb,testing)
table(plda,pnb)
## pnb
## plda setosa versicolor virginica
## setosa 15 0 0
## versicolor 0 15 0
## virginica 0 1 14
# Plot where the two classifiers agree/disagree on the test set
equalPredictions = (plda==pnb)
qplot(Petal.Width,Sepal.Width,colour=equalPredictions,data=testing)
Pros:
Cons:
\[Y = \beta_0 + \beta_1 X_1 + \beta_2 X_2 + \epsilon\]
where \(X_1\) and \(X_2\) are nearly perfectly correlated (co-linear). You can approximate this model by:
\[Y = \beta_0 + (\beta_1 + \beta_2)X_1 + \epsilon\]
The result is:
# Prostate cancer data (97 obs, 10 vars), used for the model-selection discussion
library(ElemStatLearn); data(prostate)
str(prostate)
## 'data.frame': 97 obs. of 10 variables:
## $ lcavol : num -0.58 -0.994 -0.511 -1.204 0.751 ...
## $ lweight: num 2.77 3.32 2.69 3.28 3.43 ...
## $ age : int 50 58 74 58 62 50 64 58 47 63 ...
## $ lbph : num -1.39 -1.39 -1.39 -1.39 -1.39 ...
## $ svi : int 0 0 0 0 0 0 0 0 0 0 ...
## $ lcp : num -1.39 -1.39 -1.39 -1.39 -1.39 ...
## $ gleason: int 6 6 7 6 6 6 6 6 6 6 ...
## $ pgg45 : int 0 0 20 0 0 0 0 0 0 0 ...
## $ lpsa : num -0.431 -0.163 -0.163 -0.163 0.372 ...
## $ train : logi TRUE TRUE TRUE TRUE TRUE TRUE ...
No method better when data/computation time permits it
http://www.biostat.jhsph.edu/~ririzarr/Teaching/649/ http://www.cbcb.umd.edu/~hcorrada/PracticalML/
Assume \(Y_i = f(X_i) + \epsilon_i\)
\(EPE(\lambda) = E\left[\{Y - \hat{f}_{\lambda}(X)\}^2\right]\)
Suppose \(\hat{f}_{\lambda}\) is the estimate from the training data and look at a new data point \(X = x^*\)
\[E\left[\{Y - \hat{f}_{\lambda}(x^*)\}^2\right] = \sigma^2 + \{E[\hat{f}_{\lambda}(x^*)] - f(x^*)\}^2 + var[\hat{f}_\lambda(x^*)]\]
http://www.biostat.jhsph.edu/~ririzarr/Teaching/649/ http://www.cbcb.umd.edu/~hcorrada/PracticalML/
# Overfitting demo: with only 5 observations, lm cannot estimate all the
# coefficients (several come back NA in the output below)
small = prostate[1:5,]
lm(lpsa ~ .,data =small)
##
## Call:
## lm(formula = lpsa ~ ., data = small)
##
## Coefficients:
## (Intercept) lcavol lweight age lbph
## 9.60615 0.13901 -0.79142 0.09516 NA
## svi lcp gleason pgg45 trainTRUE
## NA NA -2.08710 NA NA
http://www.biostat.jhsph.edu/~ririzarr/Teaching/649/ http://www.cbcb.umd.edu/~hcorrada/PracticalML/
Model \(Y = f(X) + \epsilon\)
Set \(\hat{f}_{\lambda}(x) = x'\beta\)
Constrain only \(\lambda\) coefficients to be nonzero.
Selection problem: after choosing \(\lambda\), figure out which \(p - \lambda\) coefficients to make nonzero
http://www.biostat.jhsph.edu/~ririzarr/Teaching/649/ http://www.cbcb.umd.edu/~hcorrada/PracticalML/
If the \(\beta_j\)’s are unconstrained: * They can explode * And hence are susceptible to very high variance
To control variance, we might regularize/shrink the coefficients.
\[ PRSS(\beta) = \sum_{j=1}^n (Y_j - \sum_{i=1}^m \beta_{i} X_{ij})^2 + P(\lambda; \beta)\]
where \(PRSS\) is a penalized form of the sum of squares. Things that are commonly looked for
Solve:
\[ \sum_{i=1}^N \left(y_i - \beta_0 - \sum_{j=1}^p x_{ij}\beta_j \right)^2 + \lambda \sum_{j=1}^p \beta_j^2\]
equivalent to solving
\(\sum_{i=1}^N \left(y_i - \beta_0 - \sum_{j=1}^p x_{ij}\beta_j \right)^2\) subject to \(\sum_{j=1}^p \beta_j^2 \leq s\) where \(s\) is inversely proportional to \(\lambda\)
Inclusion of \(\lambda\) makes the problem non-singular even if \(X^TX\) is not invertible.
http://www.biostat.jhsph.edu/~ririzarr/Teaching/649/ http://www.cbcb.umd.edu/~hcorrada/PracticalML/
http://www.biostat.jhsph.edu/~ririzarr/Teaching/649/ http://www.cbcb.umd.edu/~hcorrada/PracticalML/
\(\sum_{i=1}^N \left(y_i - \beta_0 - \sum_{j=1}^p x_{ij}\beta_j \right)^2\) subject to \(\sum_{j=1}^p |\beta_j| \leq s\)
also has a lagrangian form
\[ \sum_{i=1}^N \left(y_i - \beta_0 - \sum_{j=1}^p x_{ij}\beta_j \right)^2 + \lambda \sum_{j=1}^p |\beta_j|\]
For orthonormal design matrices (not the norm!) this has a closed form solution
\[\hat{\beta}_j = sign(\hat{\beta}_j^0)(|\hat{\beta}_j^0| - \gamma)^{+}\]
but not in general.
http://www.biostat.jhsph.edu/~ririzarr/Teaching/649/ http://www.cbcb.umd.edu/~hcorrada/PracticalML/
The caret methods for regularized regression are: ridge, lasso, and relaxo.
Suppose we have 5 completely independent classifiers
If accuracy is 70% for each: * \(10\times(0.7)^3(0.3)^2 + 5\times(0.7)^4(0.3) + (0.7)^5\) * 83.7% majority vote accuracy
With 101 independent classifiers * 99.9% majority vote accuracy
Create training, test and validation sets
# Model ensembling: split Wage into build (70%) and validation (30%),
# then split the build set into training (70%) and testing (30%)
library(ISLR); data(Wage); library(ggplot2); library(caret);
Wage <- subset(Wage,select=-c(logwage))
# Create a building data set and validation set
inBuild <- createDataPartition(y=Wage$wage,
p=0.7, list=FALSE)
validation <- Wage[-inBuild,]; buildData <- Wage[inBuild,]
inTrain <- createDataPartition(y=buildData$wage,
p=0.7, list=FALSE)
training <- buildData[inTrain,]; testing <- buildData[-inTrain,]
Create training, test and validation sets
dim(training)
## [1] 1474 11
dim(testing)
## [1] 628 11
dim(validation)
## [1] 898 11
# Base learners: a GLM and a random forest
mod1 <- train(wage ~.,method="glm",data=training)
# NOTE(review): number=3 sits outside trainControl() here, so it is passed
# to train() instead of setting 3 CV folds; likely intended as
# trainControl(method="cv", number=3)
mod2 <- train(wage ~.,method="rf",
data=training,
trControl = trainControl(method="cv"),number=3)
pred1 <- predict(mod1,testing); pred2 <- predict(mod2,testing)
qplot(pred1,pred2,colour=wage,data=testing)
# Stack the two prediction vectors with a GAM fitted on the testing set
predDF <- data.frame(pred1,pred2,wage=testing$wage)
combModFit <- train(wage ~.,method="gam",data=predDF)
combPred <- predict(combModFit,predDF)
# Root-SSE of each base model and of the combination on the testing set
sqrt(sum((pred1-testing$wage)^2))
## [1] 880.3408
sqrt(sum((pred2-testing$wage)^2))
## [1] 916.7846
sqrt(sum((combPred-testing$wage)^2))
## [1] 844.436
# Evaluate all three on the untouched validation set
pred1V <- predict(mod1,validation); pred2V <- predict(mod2,validation)
predVDF <- data.frame(pred1=pred1V,pred2=pred2V)
combPredV <- predict(combModFit,predVDF)
sqrt(sum((pred1V-validation$wage)^2))
## [1] 981.3656
sqrt(sum((pred2V-validation$wage)^2))
## [1] 1019.503
sqrt(sum((combPredV-validation$wage)^2))
## [1] 987.868
# Unsupervised prediction: cluster iris ignoring the labels, then treat the
# discovered clusters as the outcome to predict
data(iris); library(ggplot2); library(caret)
inTrain <- createDataPartition(y=iris$Species,
p=0.7, list=FALSE)
training <- iris[inTrain,]
testing <- iris[-inTrain,]
dim(training); dim(testing)
## [1] 105 5
## [1] 45 5
# k-means with 3 centers on the four numeric features (Species excluded)
kMeans1 <- kmeans(subset(training,select=-c(Species)),centers=3)
training$clusters <- as.factor(kMeans1$cluster)
qplot(Petal.Width,Petal.Length,colour=clusters,data=training)
# How well do the discovered clusters line up with the true species?
table(kMeans1$cluster,training$Species)
##
## setosa versicolor virginica
## 1 0 1 26
## 2 0 34 9
## 3 35 0 0
# Fit a tree to predict the cluster labels from the features (Species still excluded)
modFit <- train(clusters ~.,data=subset(training,select=-c(Species)),method="rpart")
table(predict(modFit,training),training$Species)
##
## setosa versicolor virginica
## 1 0 0 24
## 2 0 35 11
## 3 35 0 0
# Apply the cluster-predicting tree to new (test) data
testClusterPred <- predict(modFit,testing)
table(testClusterPred ,testing$Species)
##
## testClusterPred setosa versicolor virginica
## 1 0 0 10
## 2 0 15 5
## 3 15 0 0
http://www.google.com/trends/correlate
http://www.newscientist.com/blogs/onepercent/2011/05/google-correlate-passes-our-we.html
# Forecasting example: quantmod for financial data, forecast for time series models
library(quantmod); library(forecast)
## Loading required package: xts
## Loading required package: zoo
##
## Attaching package: 'zoo'
## The following objects are masked from 'package:base':
##
## as.Date, as.Date.numeric
## Loading required package: TTR
## Version 0.4-0 included new data defaults. See ?getSymbols.
##
## Attaching package: 'quantmod'
## The following object is masked from 'package:Hmisc':
##
## Lag
## Loading required package: timeDate
## This is forecast 7.2
##
## Attaching package: 'forecast'
## The following object is masked from 'package:nlme':
##
## getResponse
# Download daily AAPL quotes for 2008-2013
# NOTE(review): the "google" source has since been discontinued in quantmod;
# current sessions need src="yahoo" (now the default) — confirm before rerunning
from.dat <- as.Date("01/01/08", format="%m/%d/%y")
to.dat <- as.Date("12/31/13", format="%m/%d/%y")
getSymbols("AAPL", src="google", from = from.dat, to = to.dat)
## As of 0.4-0, 'getSymbols' uses env=parent.frame() and
## auto.assign=TRUE by default.
##
## This behavior will be phased out in 0.5-0 when the call will
## default to use auto.assign=FALSE. getOption("getSymbols.env") and
## getOptions("getSymbols.auto.assign") are now checked for alternate defaults
##
## This message is shown once per session and may be disabled by setting
## options("getSymbols.warning4.0"=FALSE). See ?getSymbols for more details.
## [1] "AAPL"
# First few rows of the downloaded OHLCV xts object
head(AAPL)
## AAPL.Open AAPL.High AAPL.Low AAPL.Close AAPL.Volume
## 2008-01-02 28.47 28.61 27.51 27.83 269794140
## 2008-01-03 27.92 28.20 27.53 27.85 210516460
## 2008-01-04 27.35 27.57 25.56 25.72 363888854
## 2008-01-07 25.89 26.23 24.32 25.38 518047922
## 2008-01-08 25.73 26.07 24.40 24.46 380953888
## 2008-01-09 24.50 25.60 24.00 25.60 453884711
library(xts); library(quantmod)
# Aggregate daily data to monthly and extract the monthly opening price
# NOTE(review): the variable name googOpen and the "GOOG" axis label are
# leftovers from an earlier GOOG-based version of this example; the data is AAPL
mAAPL <- to.monthly(AAPL)
googOpen <- Op(mAAPL)
# Monthly time series; frequency=12 groups 12 observations per "year" unit
ts1 <- ts(googOpen,frequency=12)
plot(ts1,xlab="Years+1", ylab="GOOG")
https://www.otexts.org/fpp/6/1
# Decompose the series into trend, seasonal and irregular components
plot(decompose(ts1),xlab="Years+1")
# Train on years 1-5; test on the remainder (end just short of year 7)
ts1Train <- window(ts1,start=1,end=5)
ts1Test <- window(ts1,start=5,end=(7-0.01))
## Warning in window.default(x, ...): 'end' value not changed
ts1Train
## Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec
## 1 28.47 19.46 17.78 20.90 24.99 26.94 23.46 22.84 24.63 15.99 15.13 13.04
## 2 12.27 12.73 12.59 14.87 17.97 19.50 20.50 23.60 24.00 26.48 27.11 28.89
## 3 30.49 27.48 29.39 33.57 37.69 37.10 36.33 37.21 35.35 40.88 43.17 45.04
## 4 46.52 48.76 50.78 50.16 49.96 49.84 47.99 56.83 55.12 54.34 56.77 54.65
## 5 58.49
\[ Y_{t}=\frac{1}{2k+1}\sum_{j=-k}^k {y_{t+j}}\]
library(forecast)
# Overlay an order-3 moving average smoother on the training series
plot(ts1Train)
lines(ma(ts1Train,order=3),col="red")
Example - simple exponential smoothing \[\hat{y}_{t+1} = \alpha y_t + (1-\alpha)\hat{y}_{t}\]
https://www.otexts.org/fpp/7/6
# Exponential smoothing state-space model (multiplicative error/trend/season),
# forecast forward, and overlay the held-out test window in red
ets1 <- ets(ts1Train,model="MMM")
fcast <- forecast(ets1)
plot(fcast); lines(ts1Test,col="red")
# Accuracy measures on the training and test sets
accuracy(fcast,ts1Test)
## ME RMSE MAE MPE MAPE MASE
## Training set -0.4686736 3.052083 2.258242 -1.976207 8.194949 0.1724703
## Test set 0.5203495 19.517108 18.175196 -2.233385 24.308895 1.3881069
## ACF1 Theil's U
## Training set -0.00786701 NA
## Test set 0.92914866 3.282191